github.com/moses-smt/mosesdecoder.git
author     Barry Haddow <barry.haddow@gmail.com>  2011-12-09 03:17:30 +0400
committer  Barry Haddow <barry.haddow@gmail.com>  2011-12-09 03:17:30 +0400
commit     072848a4dfe3fac6c1398684bea46020611f9e3a
tree       4b6a97f423ab929191802787c0983844afd9fd6d
parent     15091ab70ce249cb1c7b714d13fd5f477ddab3bf
parent     cb5213ac438abb24a75dfdff5f7b8b7558518267

Merge branch 'master' into bjam. Doesn't compile.

Conflicts:
    .gitignore
    Makefile.am
    config.h.in
    configure.in
    m4/boost.m4
    moses/src/LM/Factory.cpp
    moses/src/LM/ParallelBackoff.h
    moses/src/LM/RandLM.h
    moses/src/Makefile.am
    moses/src/PDTAimp.h
    moses/src/Phrase.h
    moses/src/PhraseDictionaryTree.cpp
    moses/src/ScoreComponentCollection.h
    moses/src/ScoreIndexManager.cpp
    moses/src/StaticData.cpp
    moses/src/TargetPhrase.cpp
    moses/src/TranslationOptionCollection.cpp
    moses/src/TranslationSystem.cpp
    scripts/Makefile
    scripts/released-files
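
For readers who want to reproduce this locally, a minimal sketch (the commit and parent hashes are taken from the header above; it is assumed, per the usual git convention, that the first parent is the tip of the 'bjam' branch being merged into and the second parent is 'master'):

    # Inspect the merge commit.
    git clone https://github.com/moses-smt/mosesdecoder.git
    cd mosesdecoder
    git log -1 072848a4dfe3fac6c1398684bea46020611f9e3a

    # Replay the merge from its first parent to reproduce the conflict list.
    git checkout 15091ab70ce249cb1c7b714d13fd5f477ddab3bf
    git merge cb5213ac438abb24a75dfdff5f7b8b7558518267
    git diff --name-only --diff-filter=U    # prints the conflicted paths
    git merge --abort                       # discard the replay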
-rw-r--r--.gitignore77
-rw-r--r--BUILD-INSTRUCTIONS.txt131
-rwxr-xr-xCLEANUP-AFTER-AUTOTOOLS.sh337
-rw-r--r--CreateOnDisk/src/Makefile.am6
-rw-r--r--Jamroot208
-rw-r--r--Makefile.am17
-rw-r--r--OnDiskPt/Jamfile2
-rw-r--r--OnDiskPt/Main.cpp (renamed from CreateOnDisk/src/Main.cpp)18
-rw-r--r--OnDiskPt/Main.h (renamed from CreateOnDisk/src/Main.h)4
-rw-r--r--OnDiskPt/OnDiskWrapper.cpp (renamed from OnDiskPt/src/OnDiskWrapper.cpp)38
-rw-r--r--OnDiskPt/OnDiskWrapper.h (renamed from OnDiskPt/src/OnDiskWrapper.h)2
-rw-r--r--OnDiskPt/Phrase.cpp (renamed from OnDiskPt/src/Phrase.cpp)8
-rw-r--r--OnDiskPt/Phrase.h (renamed from OnDiskPt/src/Phrase.h)0
-rw-r--r--OnDiskPt/PhraseNode.cpp (renamed from OnDiskPt/src/PhraseNode.cpp)24
-rw-r--r--OnDiskPt/PhraseNode.h (renamed from OnDiskPt/src/PhraseNode.h)0
-rw-r--r--OnDiskPt/SourcePhrase.cpp (renamed from OnDiskPt/src/SourcePhrase.cpp)2
-rw-r--r--OnDiskPt/SourcePhrase.h (renamed from OnDiskPt/src/SourcePhrase.h)0
-rw-r--r--OnDiskPt/TargetPhrase.cpp (renamed from OnDiskPt/src/TargetPhrase.cpp)30
-rw-r--r--OnDiskPt/TargetPhrase.h (renamed from OnDiskPt/src/TargetPhrase.h)0
-rw-r--r--OnDiskPt/TargetPhraseCollection.cpp (renamed from OnDiskPt/src/TargetPhraseCollection.cpp)8
-rw-r--r--OnDiskPt/TargetPhraseCollection.h (renamed from OnDiskPt/src/TargetPhraseCollection.h)0
-rw-r--r--OnDiskPt/Vocab.cpp (renamed from OnDiskPt/src/Vocab.cpp)4
-rw-r--r--OnDiskPt/Vocab.h (renamed from OnDiskPt/src/Vocab.h)2
-rw-r--r--OnDiskPt/Word.cpp (renamed from OnDiskPt/src/Word.cpp)6
-rw-r--r--OnDiskPt/Word.h (renamed from OnDiskPt/src/Word.h)0
-rw-r--r--OnDiskPt/src/Makefile.am14
-rwxr-xr-xbjam22
-rwxr-xr-xconfig.guess1463
-rw-r--r--config.h.in122
-rwxr-xr-xconfig.sub1579
-rw-r--r--configure.in364
-rwxr-xr-xcontrib/Extract_TMX_Corpus/Extract_TMX_Corpus.py (renamed from scripts/other/Extract_TMX_Corpus/Extract_TMX_Corpus.py)0
-rw-r--r--contrib/Extract_TMX_Corpus/Extract_TMX_Corpus.rsrc.py (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.rsrc.py)0
-rw-r--r--contrib/Extract_TMX_Corpus/LanguageCodes.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguageCodes.txt)0
-rw-r--r--contrib/Extract_TMX_Corpus/LanguagePairs.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguagePairs.txt)0
-rw-r--r--contrib/Extract_TMX_Corpus/_READ_ME_FIRST.txt (renamed from scripts/other/Extract_TMX_Corpus/_READ_ME_FIRST.txt)0
-rw-r--r--contrib/Extract_TMX_Corpus/gpl.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/gpl.txt)0
-rw-r--r--contrib/Moses2TMX/LanguageCodes.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/LanguageCodes.txt)0
-rwxr-xr-xcontrib/Moses2TMX/Moses2TMX.py (renamed from scripts/other/Moses2TMX/Moses2TMX.py)0
-rw-r--r--contrib/Moses2TMX/Moses2TMX.rsrc.py (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.rsrc.py)0
-rw-r--r--contrib/Moses2TMX/_READ_ME_FIRST.txt (renamed from scripts/other/Moses2TMX/_READ_ME_FIRST.txt)0
-rw-r--r--contrib/Moses2TMX/gpl.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/gpl.txt)0
-rw-r--r--contrib/checkplf/checkplf.cpp (renamed from moses-cmd/src/checkplf.cpp)0
-rw-r--r--contrib/eppex/ISS.h (renamed from scripts/training/eppex/ISS.h)0
-rw-r--r--contrib/eppex/IndexedPhrasesPair.h (renamed from scripts/training/eppex/IndexedPhrasesPair.h)0
-rw-r--r--contrib/eppex/LossyCounter.h (renamed from scripts/training/eppex/LossyCounter.h)0
-rw-r--r--contrib/eppex/Makefile.am (renamed from scripts/training/eppex/Makefile.am)0
-rw-r--r--contrib/eppex/Makefile.in762
-rw-r--r--contrib/eppex/SafeGetline.h (renamed from scripts/training/eppex/SafeGetline.h)0
-rw-r--r--contrib/eppex/aclocal.m4 (renamed from scripts/training/eppex/aclocal.m4)0
-rw-r--r--contrib/eppex/config.h.in (renamed from scripts/training/eppex/config.h.in)0
-rwxr-xr-xcontrib/eppex/configure (renamed from scripts/training/eppex/configure)0
-rw-r--r--contrib/eppex/configure.ac (renamed from scripts/training/eppex/configure.ac)0
-rw-r--r--contrib/eppex/counter.cpp (renamed from scripts/training/eppex/counter.cpp)0
-rwxr-xr-xcontrib/eppex/depcomp (renamed from scripts/training/eppex/depcomp)0
-rw-r--r--contrib/eppex/eppex.cpp (renamed from scripts/training/eppex/eppex.cpp)0
-rwxr-xr-xcontrib/eppex/install-sh (renamed from scripts/training/eppex/install-sh)0
-rw-r--r--contrib/eppex/m4/ax_boost_base.m4 (renamed from scripts/training/eppex/m4/ax_boost_base.m4)0
-rwxr-xr-xcontrib/eppex/missing (renamed from scripts/training/eppex/missing)0
-rw-r--r--contrib/eppex/phrase-extract.cpp (renamed from scripts/training/eppex/phrase-extract.cpp)0
-rw-r--r--contrib/eppex/phrase-extract.h (renamed from scripts/training/eppex/phrase-extract.h)0
-rw-r--r--contrib/eppex/shared.cpp (renamed from scripts/training/eppex/shared.cpp)0
-rw-r--r--contrib/eppex/shared.h (renamed from scripts/training/eppex/shared.h)0
-rw-r--r--contrib/eppex/typedefs.h (renamed from scripts/training/eppex/typedefs.h)0
-rw-r--r--contrib/lmserver/AUTHORS (renamed from lmserver/AUTHORS)0
-rwxr-xr-xcontrib/lmserver/BUILD (renamed from lmserver/BUILD)0
-rw-r--r--contrib/lmserver/COPYING (renamed from lmserver/COPYING)0
-rw-r--r--contrib/lmserver/ChangeLog (renamed from lmserver/ChangeLog)0
l---------contrib/lmserver/INSTALL (renamed from lmserver/INSTALL)0
-rw-r--r--contrib/lmserver/Makefile.am (renamed from lmserver/Makefile.am)0
-rw-r--r--contrib/lmserver/Makefile.in (renamed from lmserver/Makefile.in)0
-rw-r--r--contrib/lmserver/NEWS (renamed from lmserver/NEWS)0
-rw-r--r--contrib/lmserver/README (renamed from lmserver/README)0
-rw-r--r--contrib/lmserver/aclocal.m4 (renamed from lmserver/aclocal.m4)0
-rwxr-xr-xcontrib/lmserver/compile (renamed from lmserver/compile)0
-rwxr-xr-xcontrib/lmserver/config.guess (renamed from lmserver/config.guess)0
-rw-r--r--contrib/lmserver/config.h.in (renamed from lmserver/config.h.in)0
-rwxr-xr-xcontrib/lmserver/config.status (renamed from lmserver/config.status)0
-rwxr-xr-xcontrib/lmserver/config.sub (renamed from lmserver/config.sub)0
-rwxr-xr-xcontrib/lmserver/configure (renamed from lmserver/configure)0
-rw-r--r--contrib/lmserver/configure.ac (renamed from lmserver/configure.ac)0
-rw-r--r--contrib/lmserver/daemon.c (renamed from lmserver/daemon.c)0
-rwxr-xr-xcontrib/lmserver/depcomp (renamed from lmserver/depcomp)0
-rw-r--r--contrib/lmserver/examples/LMClient.java (renamed from lmserver/examples/LMClient.java)0
-rw-r--r--contrib/lmserver/examples/LMClient.pm (renamed from lmserver/examples/LMClient.pm)0
-rw-r--r--contrib/lmserver/examples/lmclient.cc (renamed from lmserver/examples/lmclient.cc)0
-rwxr-xr-xcontrib/lmserver/examples/query_lmserver.pl (renamed from lmserver/examples/query_lmserver.pl)0
-rwxr-xr-xcontrib/lmserver/install-sh (renamed from lmserver/install-sh)0
-rw-r--r--contrib/lmserver/lmserver.c (renamed from lmserver/lmserver.c)0
-rw-r--r--contrib/lmserver/lmserver.h (renamed from lmserver/lmserver.h)0
-rwxr-xr-xcontrib/lmserver/missing (renamed from lmserver/missing)0
-rw-r--r--contrib/lmserver/srilm.cc (renamed from lmserver/srilm.cc)0
-rw-r--r--contrib/lmserver/srilm.h (renamed from lmserver/srilm.h)0
-rw-r--r--contrib/lmserver/stamp-h1 (renamed from lmserver/stamp-h1)0
-rw-r--r--contrib/lmserver/stats.h (renamed from lmserver/stats.h)0
-rw-r--r--contrib/lmserver/thread.c (renamed from lmserver/thread.c)0
-rw-r--r--contrib/memscore/Makefile.am (renamed from scripts/training/memscore/Makefile.am)0
-rw-r--r--contrib/memscore/Makefile.in581
-rw-r--r--contrib/memscore/aclocal.m4 (renamed from scripts/training/memscore/aclocal.m4)0
-rw-r--r--contrib/memscore/config.h.in (renamed from scripts/training/memscore/config.h.in)0
-rwxr-xr-xcontrib/memscore/configure (renamed from scripts/training/memscore/configure)0
-rw-r--r--contrib/memscore/configure.ac (renamed from scripts/training/memscore/configure.ac)0
-rw-r--r--contrib/memscore/datastorage.h (renamed from scripts/training/memscore/datastorage.h)0
-rwxr-xr-xcontrib/memscore/depcomp (renamed from scripts/training/memscore/depcomp)0
-rwxr-xr-xcontrib/memscore/install-sh (renamed from scripts/training/memscore/install-sh)0
-rw-r--r--contrib/memscore/lexdecom.cpp (renamed from scripts/training/memscore/lexdecom.cpp)0
-rw-r--r--contrib/memscore/lexdecom.h (renamed from scripts/training/memscore/lexdecom.h)0
-rw-r--r--contrib/memscore/m4/ax_boost_base.m4 (renamed from scripts/training/memscore/m4/ax_boost_base.m4)0
-rw-r--r--contrib/memscore/memscore.cpp (renamed from scripts/training/memscore/memscore.cpp)0
-rw-r--r--contrib/memscore/memscore.h (renamed from scripts/training/memscore/memscore.h)0
-rwxr-xr-xcontrib/memscore/missing (renamed from scripts/training/memscore/missing)0
-rw-r--r--contrib/memscore/phraselm.cpp (renamed from scripts/training/memscore/phraselm.cpp)0
-rw-r--r--contrib/memscore/phraselm.h (renamed from scripts/training/memscore/phraselm.h)0
-rw-r--r--contrib/memscore/phrasetable.cpp (renamed from scripts/training/memscore/phrasetable.cpp)0
-rw-r--r--contrib/memscore/phrasetable.h (renamed from scripts/training/memscore/phrasetable.h)0
-rw-r--r--contrib/memscore/scorer-impl.h (renamed from scripts/training/memscore/scorer-impl.h)0
-rw-r--r--contrib/memscore/scorer.cpp (renamed from scripts/training/memscore/scorer.cpp)0
-rw-r--r--contrib/memscore/scorer.h (renamed from scripts/training/memscore/scorer.h)0
-rw-r--r--contrib/memscore/statistic.h (renamed from scripts/training/memscore/statistic.h)0
-rw-r--r--contrib/memscore/timestamp.h (renamed from scripts/training/memscore/timestamp.h)0
-rw-r--r--contrib/moses-for-mere-mortals/READ_ME_FIRST.txt (renamed from scripts/moses-for-mere-mortals/READ_ME_FIRST.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.py (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.py)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.rsrc.py (renamed from scripts/other/Extract_TMX_Corpus/Extract_TMX_Corpus.rsrc.py)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguageCodes.txt (renamed from scripts/other/Extract_TMX_Corpus/LanguageCodes.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguagePairs.txt (renamed from scripts/other/Extract_TMX_Corpus/LanguagePairs.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/_READ_ME_FIRST.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/_READ_ME_FIRST.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/gpl.txt (renamed from scripts/other/Extract_TMX_Corpus/gpl.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/LanguageCodes.txt (renamed from scripts/other/Moses2TMX/LanguageCodes.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.py (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.py)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.rsrc.py (renamed from scripts/other/Moses2TMX/Moses2TMX.rsrc.py)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/_READ_ME_FIRST.txt (renamed from scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/_READ_ME_FIRST.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/gpl.txt (renamed from scripts/other/Moses2TMX/gpl.txt)0
-rw-r--r--contrib/moses-for-mere-mortals/all.css (renamed from scripts/moses-for-mere-mortals/all.css)0
-rw-r--r--contrib/moses-for-mere-mortals/docs/Help-Tutorial.doc (renamed from scripts/moses-for-mere-mortals/docs/Help-Tutorial.doc)bin565248 -> 565248 bytes
-rw-r--r--contrib/moses-for-mere-mortals/docs/Overview.jpeg (renamed from scripts/moses-for-mere-mortals/docs/Overview.jpeg)bin207618 -> 207618 bytes
-rw-r--r--contrib/moses-for-mere-mortals/docs/Quick-Start-Guide.doc (renamed from scripts/moses-for-mere-mortals/docs/Quick-Start-Guide.doc)bin16896 -> 16896 bytes
-rw-r--r--contrib/moses-for-mere-mortals/docs/all.css (renamed from scripts/moses-for-mere-mortals/docs/all.css)0
-rw-r--r--contrib/moses-for-mere-mortals/docs/thanks.html (renamed from scripts/moses-for-mere-mortals/docs/thanks.html)0
-rw-r--r--contrib/moses-for-mere-mortals/index.html (renamed from scripts/moses-for-mere-mortals/index.html)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/create-1.37 (renamed from scripts/moses-for-mere-mortals/scripts/create-1.37)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/make-test-files-0.14 (renamed from scripts/moses-for-mere-mortals/scripts/make-test-files-0.14)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/modified-scripts/READ_ME_FIRST (renamed from scripts/moses-for-mere-mortals/scripts/modified-scripts/READ_ME_FIRST)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/modified-scripts/mert-moses-new-modif.pl (renamed from scripts/moses-for-mere-mortals/scripts/modified-scripts/mert-moses-new-modif.pl)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/modified-scripts/nonbreaking_prefix.pt (renamed from scripts/moses-for-mere-mortals/scripts/modified-scripts/nonbreaking_prefix.pt)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/score-0.85 (renamed from scripts/moses-for-mere-mortals/scripts/score-0.85)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/train-1.11 (renamed from scripts/moses-for-mere-mortals/scripts/train-1.11)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/transfer-training-to-another-location-0.07 (renamed from scripts/moses-for-mere-mortals/scripts/transfer-training-to-another-location-0.07)0
-rw-r--r--contrib/moses-for-mere-mortals/scripts/translate-1.32 (renamed from scripts/moses-for-mere-mortals/scripts/translate-1.32)0
-rw-r--r--contrib/other-builds/CreateOnDisk.vcxproj (renamed from CreateOnDisk/CreateOnDisk.vcxproj)0
-rw-r--r--contrib/other-builds/CreateOnDisk.xcodeproj/project.pbxproj (renamed from CreateOnDisk/CreateOnDisk.xcodeproj/project.pbxproj)0
-rw-r--r--contrib/other-builds/OnDiskPt.vcxproj (renamed from OnDiskPt/OnDiskPt.vcxproj)0
-rw-r--r--contrib/other-builds/OnDiskPt.xcodeproj/project.pbxproj (renamed from OnDiskPt/OnDiskPt.xcodeproj/project.pbxproj)2
-rw-r--r--contrib/other-builds/moses-chart-cmd.vcxproj (renamed from moses-chart-cmd/moses-chart-cmd.vcxproj)0
-rw-r--r--contrib/other-builds/moses-chart-cmd.xcodeproj/project.pbxproj (renamed from moses-chart-cmd/moses-chart-cmd.xcodeproj/project.pbxproj)97
-rw-r--r--contrib/other-builds/moses-cmd.vcxproj (renamed from moses-cmd/moses-cmd.vcxproj)0
-rw-r--r--contrib/other-builds/moses-cmd.xcodeproj/project.pbxproj (renamed from moses-cmd/moses-cmd.xcodeproj/project.pbxproj)72
-rw-r--r--contrib/other-builds/moses.sln (renamed from moses.sln)0
-rw-r--r--contrib/other-builds/moses.vcxproj (renamed from moses/moses.vcxproj)0
-rw-r--r--contrib/other-builds/moses.xcodeproj/project.pbxproj (renamed from moses/moses.xcodeproj/project.pbxproj)6
-rw-r--r--contrib/reranking/data/README (renamed from reranking/data/README)0
-rw-r--r--contrib/reranking/data/nbest.small (renamed from reranking/data/nbest.small)0
-rw-r--r--contrib/reranking/data/weights (renamed from reranking/data/weights)0
-rw-r--r--contrib/reranking/src/Hypo.cpp (renamed from reranking/src/Hypo.cpp)0
-rw-r--r--contrib/reranking/src/Hypo.h (renamed from reranking/src/Hypo.h)0
-rw-r--r--contrib/reranking/src/Main.cpp (renamed from reranking/src/Main.cpp)0
-rw-r--r--contrib/reranking/src/Makefile (renamed from reranking/src/Makefile)0
-rw-r--r--contrib/reranking/src/NBest.cpp (renamed from reranking/src/NBest.cpp)0
-rw-r--r--contrib/reranking/src/NBest.h (renamed from reranking/src/NBest.h)0
-rw-r--r--contrib/reranking/src/ParameterNBest.cpp (renamed from reranking/src/ParameterNBest.cpp)0
-rw-r--r--contrib/reranking/src/ParameterNBest.h (renamed from reranking/src/ParameterNBest.h)0
-rw-r--r--contrib/reranking/src/Tools.cpp (renamed from reranking/src/Tools.cpp)0
-rw-r--r--contrib/reranking/src/Tools.h (renamed from reranking/src/Tools.h)0
-rw-r--r--contrib/server/Jamfile41
-rwxr-xr-xcontrib/server/client.perl (renamed from server/client.perl)0
-rw-r--r--contrib/server/mosesserver.cpp (renamed from server/mosesserver.cpp)8
-rwxr-xr-xcontrib/server/sgclient.perl (renamed from server/sgclient.perl)0
-rw-r--r--contrib/sigtest-filter/Makefile (renamed from sigtest-filter/Makefile)0
-rw-r--r--contrib/sigtest-filter/README.txt (renamed from sigtest-filter/README.txt)0
-rw-r--r--contrib/sigtest-filter/WIN32_functions.cpp (renamed from sigtest-filter/WIN32_functions.cpp)0
-rw-r--r--contrib/sigtest-filter/WIN32_functions.h (renamed from sigtest-filter/WIN32_functions.h)0
-rwxr-xr-xcontrib/sigtest-filter/check-install (renamed from sigtest-filter/check-install)0
-rw-r--r--contrib/sigtest-filter/filter-pt.cpp (renamed from sigtest-filter/filter-pt.cpp)0
-rw-r--r--contrib/sigtest-filter/sigtest-filter.sln (renamed from sigtest-filter/sigtest-filter.sln)0
-rw-r--r--contrib/sigtest-filter/sigtest-filter.vcproj (renamed from sigtest-filter/sigtest-filter.vcproj)0
-rw-r--r--contrib/synlm/hhmm/LICENSE (renamed from synlm/hhmm/LICENSE)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-archetypeset.h (renamed from synlm/hhmm/rvtl/include/nl-archetypeset.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-array.h (renamed from synlm/hhmm/rvtl/include/nl-array.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-beam.h (renamed from synlm/hhmm/rvtl/include/nl-beam.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-const.h (renamed from synlm/hhmm/rvtl/include/nl-const.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-cpt.h (renamed from synlm/hhmm/rvtl/include/nl-cpt.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-crf.h (renamed from synlm/hhmm/rvtl/include/nl-crf.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-denot.h (renamed from synlm/hhmm/rvtl/include/nl-denot.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-dtree-cont.h (renamed from synlm/hhmm/rvtl/include/nl-dtree-cont.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-dtree.h (renamed from synlm/hhmm/rvtl/include/nl-dtree.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-fixedmatrix.h (renamed from synlm/hhmm/rvtl/include/nl-fixedmatrix.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-gauss.h (renamed from synlm/hhmm/rvtl/include/nl-gauss.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-hash.h (renamed from synlm/hhmm/rvtl/include/nl-hash.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-heap.h (renamed from synlm/hhmm/rvtl/include/nl-heap.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-hmm.h (renamed from synlm/hhmm/rvtl/include/nl-hmm.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-hmm2.h (renamed from synlm/hhmm/rvtl/include/nl-hmm2.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-hmmloop.h (renamed from synlm/hhmm/rvtl/include/nl-hmmloop.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-iomacros.h (renamed from synlm/hhmm/rvtl/include/nl-iomacros.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-linsep.h (renamed from synlm/hhmm/rvtl/include/nl-linsep.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-list.h (renamed from synlm/hhmm/rvtl/include/nl-list.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-matrix.h (renamed from synlm/hhmm/rvtl/include/nl-matrix.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-mixture.h (renamed from synlm/hhmm/rvtl/include/nl-mixture.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-modelfile.h (renamed from synlm/hhmm/rvtl/include/nl-modelfile.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-oblidtree.h (renamed from synlm/hhmm/rvtl/include/nl-oblidtree.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-prob.h (renamed from synlm/hhmm/rvtl/include/nl-prob.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-probmodel.h (renamed from synlm/hhmm/rvtl/include/nl-probmodel.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-racpt.h (renamed from synlm/hhmm/rvtl/include/nl-racpt.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-randvar.h (renamed from synlm/hhmm/rvtl/include/nl-randvar.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-refrv.h (renamed from synlm/hhmm/rvtl/include/nl-refrv.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-safeids.h (renamed from synlm/hhmm/rvtl/include/nl-safeids.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-stream.h (renamed from synlm/hhmm/rvtl/include/nl-stream.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-string.h (renamed from synlm/hhmm/rvtl/include/nl-string.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-stringindex.h (renamed from synlm/hhmm/rvtl/include/nl-stringindex.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-tetrahex.h (renamed from synlm/hhmm/rvtl/include/nl-tetrahex.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-timer.h (renamed from synlm/hhmm/rvtl/include/nl-timer.h)0
-rw-r--r--contrib/synlm/hhmm/rvtl/include/nl-tree.h (renamed from synlm/hhmm/rvtl/include/nl-tree.h)0
-rw-r--r--contrib/synlm/hhmm/wsjparse/include/HHMMLangModel-gf.h (renamed from synlm/hhmm/wsjparse/include/HHMMLangModel-gf.h)0
-rw-r--r--contrib/synlm/hhmm/wsjparse/include/TextObsModel.h (renamed from synlm/hhmm/wsjparse/include/TextObsModel.h)0
-rw-r--r--contrib/synlm/hhmm/wsjparse/include/TextObsVars.h (renamed from synlm/hhmm/wsjparse/include/TextObsVars.h)0
-rw-r--r--contrib/web/bin/daemon.pl (renamed from web/bin/daemon.pl)0
-rw-r--r--contrib/web/bin/detokenizer.perl (renamed from web/bin/detokenizer.perl)0
-rw-r--r--contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.de (renamed from web/bin/nonbreaking_prefixes/nonbreaking_prefix.de)0
-rw-r--r--contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.el (renamed from web/bin/nonbreaking_prefixes/nonbreaking_prefix.el)0
-rw-r--r--contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.en (renamed from web/bin/nonbreaking_prefixes/nonbreaking_prefix.en)0
-rw-r--r--contrib/web/bin/start-daemon-cluster.pl (renamed from web/bin/start-daemon-cluster.pl)0
-rw-r--r--contrib/web/bin/tokenizer.perl (renamed from web/bin/tokenizer.perl)0
-rw-r--r--contrib/web/index.cgi (renamed from web/index.cgi)0
-rw-r--r--contrib/web/index.js (renamed from web/index.js)0
-rw-r--r--contrib/web/lib/RemoteProcess.pm (renamed from web/lib/RemoteProcess.pm)0
-rw-r--r--contrib/web/lib/Subprocess.pm (renamed from web/lib/Subprocess.pm)0
-rw-r--r--contrib/web/translate.cgi (renamed from web/translate.cgi)0
-rw-r--r--cruise-control/config.ems4
-rwxr-xr-xcruise-control/test_all_new_commits.sh44
-rwxr-xr-xdepcomp441
-rwxr-xr-xinstall-sh276
-rw-r--r--jam-files/LICENSE_1_0.txt23
-rw-r--r--jam-files/boost-build/boost-build.jam8
-rw-r--r--jam-files/boost-build/bootstrap.jam18
-rw-r--r--jam-files/boost-build/build-system.jam1008
-rw-r--r--jam-files/boost-build/build/ac.jam198
-rw-r--r--jam-files/boost-build/build/alias.jam73
-rw-r--r--jam-files/boost-build/build/build-request.jam322
-rw-r--r--jam-files/boost-build/build/configure.jam237
-rw-r--r--jam-files/boost-build/build/feature.jam1335
-rw-r--r--jam-files/boost-build/build/generators.jam1408
-rw-r--r--jam-files/boost-build/build/modifiers.jam232
-rw-r--r--jam-files/boost-build/build/project.jam1110
-rw-r--r--jam-files/boost-build/build/property-set.jam481
-rw-r--r--jam-files/boost-build/build/property.jam788
-rw-r--r--jam-files/boost-build/build/readme.txt13
-rw-r--r--jam-files/boost-build/build/scanner.jam153
-rw-r--r--jam-files/boost-build/build/targets.jam1659
-rw-r--r--jam-files/boost-build/build/toolset.jam502
-rw-r--r--jam-files/boost-build/build/type.jam425
-rw-r--r--jam-files/boost-build/build/version.jam161
-rw-r--r--jam-files/boost-build/build/virtual-target.jam1317
-rw-r--r--jam-files/boost-build/kernel/boost-build.jam5
-rw-r--r--jam-files/boost-build/kernel/bootstrap.jam263
-rw-r--r--jam-files/boost-build/kernel/class.jam420
-rw-r--r--jam-files/boost-build/kernel/errors.jam274
-rw-r--r--jam-files/boost-build/kernel/modules.jam354
-rw-r--r--jam-files/boost-build/options/help.jam212
-rw-r--r--jam-files/boost-build/site-config.jam4
-rw-r--r--jam-files/boost-build/tools/acc.jam118
-rw-r--r--jam-files/boost-build/tools/bison.jam32
-rw-r--r--jam-files/boost-build/tools/boostbook-config.jam13
-rw-r--r--jam-files/boost-build/tools/boostbook.jam727
-rw-r--r--jam-files/boost-build/tools/borland.jam220
-rw-r--r--jam-files/boost-build/tools/builtin.jam960
-rw-r--r--jam-files/boost-build/tools/cast.jam91
-rw-r--r--jam-files/boost-build/tools/clang-darwin.jam170
-rw-r--r--jam-files/boost-build/tools/clang-linux.jam196
-rw-r--r--jam-files/boost-build/tools/clang.jam27
-rw-r--r--jam-files/boost-build/tools/common.jam986
-rw-r--r--jam-files/boost-build/tools/como-linux.jam103
-rw-r--r--jam-files/boost-build/tools/como-win.jam117
-rw-r--r--jam-files/boost-build/tools/como.jam29
-rw-r--r--jam-files/boost-build/tools/convert.jam62
-rw-r--r--jam-files/boost-build/tools/cw-config.jam34
-rw-r--r--jam-files/boost-build/tools/cw.jam246
-rw-r--r--jam-files/boost-build/tools/darwin.jam568
-rw-r--r--jam-files/boost-build/tools/dmc.jam134
-rw-r--r--jam-files/boost-build/tools/docutils.jam84
-rw-r--r--jam-files/boost-build/tools/doxygen-config.jam11
-rw-r--r--jam-files/boost-build/tools/doxygen.jam776
-rw-r--r--jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile3
-rw-r--r--jam-files/boost-build/tools/doxygen/windows-paths-check.hpp0
-rw-r--r--jam-files/boost-build/tools/fop.jam69
-rw-r--r--jam-files/boost-build/tools/fortran.jam55
-rw-r--r--jam-files/boost-build/tools/gcc.jam1185
-rw-r--r--jam-files/boost-build/tools/generate.jam108
-rw-r--r--jam-files/boost-build/tools/gettext.jam230
-rw-r--r--jam-files/boost-build/tools/gfortran.jam39
-rw-r--r--jam-files/boost-build/tools/hp_cxx.jam181
-rw-r--r--jam-files/boost-build/tools/hpfortran.jam35
-rw-r--r--jam-files/boost-build/tools/ifort.jam44
-rw-r--r--jam-files/boost-build/tools/intel-darwin.jam220
-rw-r--r--jam-files/boost-build/tools/intel-linux.jam250
-rw-r--r--jam-files/boost-build/tools/intel-win.jam184
-rw-r--r--jam-files/boost-build/tools/intel.jam34
-rw-r--r--jam-files/boost-build/tools/lex.jam33
-rw-r--r--jam-files/boost-build/tools/make.jam72
-rw-r--r--jam-files/boost-build/tools/mc.jam44
-rw-r--r--jam-files/boost-build/tools/message.jam55
-rw-r--r--jam-files/boost-build/tools/midl.jam142
-rw-r--r--jam-files/boost-build/tools/mipspro.jam145
-rw-r--r--jam-files/boost-build/tools/mpi.jam583
-rw-r--r--jam-files/boost-build/tools/msvc-config.jam12
-rw-r--r--jam-files/boost-build/tools/msvc.jam1392
-rw-r--r--jam-files/boost-build/tools/notfile.jam74
-rw-r--r--jam-files/boost-build/tools/package.jam165
-rw-r--r--jam-files/boost-build/tools/pathscale.jam168
-rw-r--r--jam-files/boost-build/tools/pch.jam95
-rw-r--r--jam-files/boost-build/tools/pgi.jam147
-rw-r--r--jam-files/boost-build/tools/python-config.jam27
-rw-r--r--jam-files/boost-build/tools/python.jam1267
-rw-r--r--jam-files/boost-build/tools/qcc.jam236
-rw-r--r--jam-files/boost-build/tools/qt.jam17
-rw-r--r--jam-files/boost-build/tools/qt3.jam209
-rw-r--r--jam-files/boost-build/tools/qt4.jam713
-rw-r--r--jam-files/boost-build/tools/quickbook-config.jam44
-rw-r--r--jam-files/boost-build/tools/quickbook.jam361
-rw-r--r--jam-files/boost-build/tools/rc.jam156
-rw-r--r--jam-files/boost-build/tools/stage.jam524
-rw-r--r--jam-files/boost-build/tools/stlport.jam303
-rw-r--r--jam-files/boost-build/tools/sun.jam142
-rw-r--r--jam-files/boost-build/tools/symlink.jam140
-rw-r--r--jam-files/boost-build/tools/testing-aux.jam210
-rw-r--r--jam-files/boost-build/tools/testing.jam581
-rw-r--r--jam-files/boost-build/tools/types/asm.jam4
-rw-r--r--jam-files/boost-build/tools/types/cpp.jam86
-rw-r--r--jam-files/boost-build/tools/types/exe.jam9
-rw-r--r--jam-files/boost-build/tools/types/html.jam4
-rw-r--r--jam-files/boost-build/tools/types/lib.jam74
-rw-r--r--jam-files/boost-build/tools/types/obj.jam9
-rw-r--r--jam-files/boost-build/tools/types/objc.jam26
-rw-r--r--jam-files/boost-build/tools/types/preprocessed.jam9
-rw-r--r--jam-files/boost-build/tools/types/qt.jam10
-rw-r--r--jam-files/boost-build/tools/types/register.jam39
-rw-r--r--jam-files/boost-build/tools/types/rsp.jam4
-rw-r--r--jam-files/boost-build/tools/unix.jam224
-rw-r--r--jam-files/boost-build/tools/vacpp.jam150
-rw-r--r--jam-files/boost-build/tools/whale.jam116
-rw-r--r--jam-files/boost-build/tools/xlf.jam39
-rw-r--r--jam-files/boost-build/tools/xsltproc-config.jam37
-rw-r--r--jam-files/boost-build/tools/xsltproc.jam194
-rw-r--r--jam-files/boost-build/tools/xsltproc/included.xsl11
-rw-r--r--jam-files/boost-build/tools/xsltproc/test.xml2
-rw-r--r--jam-files/boost-build/tools/xsltproc/test.xsl12
-rw-r--r--jam-files/boost-build/tools/zlib.jam92
-rw-r--r--jam-files/boost-build/user-config.jam92
-rw-r--r--jam-files/boost-build/util/assert.jam336
-rw-r--r--jam-files/boost-build/util/container.jam339
-rw-r--r--jam-files/boost-build/util/doc.jam997
-rw-r--r--jam-files/boost-build/util/indirect.jam115
-rw-r--r--jam-files/boost-build/util/numbers.jam218
-rw-r--r--jam-files/boost-build/util/option.jam109
-rw-r--r--jam-files/boost-build/util/order.jam169
-rw-r--r--jam-files/boost-build/util/os.jam171
-rw-r--r--jam-files/boost-build/util/path.jam934
-rw-r--r--jam-files/boost-build/util/print.jam488
-rw-r--r--jam-files/boost-build/util/regex.jam193
-rw-r--r--jam-files/boost-build/util/sequence.jam335
-rw-r--r--jam-files/boost-build/util/set.jam93
-rw-r--r--jam-files/boost-build/util/string.jam189
-rw-r--r--jam-files/boost-build/util/utility.jam235
-rw-r--r--jam-files/engine/Jambase2473
-rw-r--r--jam-files/engine/boost-jam.spec64
-rw-r--r--jam-files/engine/boost-no-inspect1
-rw-r--r--jam-files/engine/build.bat532
-rw-r--r--jam-files/engine/build.jam1070
-rwxr-xr-xjam-files/engine/build.sh303
-rw-r--r--jam-files/engine/build_vms.com105
-rw-r--r--jam-files/engine/builtins.c2310
-rw-r--r--jam-files/engine/builtins.h69
-rw-r--r--jam-files/engine/bump_version.py80
-rw-r--r--jam-files/engine/class.c141
-rw-r--r--jam-files/engine/class.h13
-rw-r--r--jam-files/engine/command.c100
-rw-r--r--jam-files/engine/command.h61
-rw-r--r--jam-files/engine/compile.c1424
-rw-r--r--jam-files/engine/compile.h82
-rw-r--r--jam-files/engine/debian/changelog72
-rw-r--r--jam-files/engine/debian/control16
-rw-r--r--jam-files/engine/debian/copyright25
-rw-r--r--jam-files/engine/debian/jam.man.sgml236
-rwxr-xr-xjam-files/engine/debian/rules73
-rw-r--r--jam-files/engine/debug.c132
-rw-r--r--jam-files/engine/debug.h54
-rw-r--r--jam-files/engine/execcmd.h45
-rw-r--r--jam-files/engine/execmac.c69
-rw-r--r--jam-files/engine/execnt.c1296
-rw-r--r--jam-files/engine/execunix.c569
-rw-r--r--jam-files/engine/execvms.c161
-rw-r--r--jam-files/engine/expand.c733
-rw-r--r--jam-files/engine/expand.h14
-rw-r--r--jam-files/engine/filemac.c175
-rw-r--r--jam-files/engine/filent.c387
-rw-r--r--jam-files/engine/fileos2.c138
-rw-r--r--jam-files/engine/filesys.c83
-rw-r--r--jam-files/engine/filesys.h60
-rw-r--r--jam-files/engine/fileunix.c501
-rw-r--r--jam-files/engine/filevms.c327
-rw-r--r--jam-files/engine/frames.c22
-rw-r--r--jam-files/engine/frames.h37
-rw-r--r--jam-files/engine/glob.c152
-rw-r--r--jam-files/engine/hash.c459
-rw-r--r--jam-files/engine/hash.h25
-rw-r--r--jam-files/engine/hcache.c434
-rw-r--r--jam-files/engine/hcache.h18
-rw-r--r--jam-files/engine/hdrmacro.c137
-rw-r--r--jam-files/engine/hdrmacro.h14
-rw-r--r--jam-files/engine/headers.c203
-rw-r--r--jam-files/engine/headers.h16
-rw-r--r--jam-files/engine/jam.c632
-rw-r--r--jam-files/engine/jam.h579
-rw-r--r--jam-files/engine/jambase.c1691
-rw-r--r--jam-files/engine/jambase.h15
-rw-r--r--jam-files/engine/jamgram.c1830
-rw-r--r--jam-files/engine/jamgram.h140
-rw-r--r--jam-files/engine/jamgram.y371
-rw-r--r--jam-files/engine/jamgram.yy329
-rw-r--r--jam-files/engine/jamgramtab.h44
-rw-r--r--jam-files/engine/lists.c339
-rw-r--r--jam-files/engine/lists.h108
-rw-r--r--jam-files/engine/make.c814
-rw-r--r--jam-files/engine/make.h41
-rw-r--r--jam-files/engine/make1.c1145
-rw-r--r--jam-files/engine/md5.c381
-rw-r--r--jam-files/engine/md5.h91
-rw-r--r--jam-files/engine/mem.c75
-rw-r--r--jam-files/engine/mem.h134
-rw-r--r--jam-files/engine/mkjambase.c123
-rw-r--r--jam-files/engine/modules.c168
-rw-r--r--jam-files/engine/modules.h37
-rw-r--r--jam-files/engine/modules/order.c144
-rw-r--r--jam-files/engine/modules/path.c32
-rw-r--r--jam-files/engine/modules/property-set.c110
-rw-r--r--jam-files/engine/modules/readme.txt3
-rw-r--r--jam-files/engine/modules/regex.c96
-rw-r--r--jam-files/engine/modules/sequence.c42
-rw-r--r--jam-files/engine/modules/set.c41
-rw-r--r--jam-files/engine/native.c36
-rw-r--r--jam-files/engine/native.h34
-rw-r--r--jam-files/engine/newstr.c174
-rw-r--r--jam-files/engine/newstr.h14
-rw-r--r--jam-files/engine/option.c94
-rw-r--r--jam-files/engine/option.h23
-rw-r--r--jam-files/engine/output.c125
-rw-r--r--jam-files/engine/output.h29
-rw-r--r--jam-files/engine/parse.c132
-rw-r--r--jam-files/engine/parse.h59
-rw-r--r--jam-files/engine/patchlevel.h17
-rw-r--r--jam-files/engine/pathmac.c252
-rw-r--r--jam-files/engine/pathsys.h91
-rw-r--r--jam-files/engine/pathunix.c457
-rw-r--r--jam-files/engine/pathvms.c406
-rw-r--r--jam-files/engine/pwd.c66
-rw-r--r--jam-files/engine/pwd.h10
-rw-r--r--jam-files/engine/regexp.c1328
-rw-r--r--jam-files/engine/regexp.h32
-rw-r--r--jam-files/engine/rules.c810
-rw-r--r--jam-files/engine/rules.h280
-rw-r--r--jam-files/engine/scan.c418
-rw-r--r--jam-files/engine/scan.h56
-rw-r--r--jam-files/engine/search.c223
-rw-r--r--jam-files/engine/search.h11
-rw-r--r--jam-files/engine/strings.c201
-rw-r--r--jam-files/engine/strings.h34
-rw-r--r--jam-files/engine/subst.c94
-rw-r--r--jam-files/engine/timestamp.c226
-rw-r--r--jam-files/engine/timestamp.h12
-rw-r--r--jam-files/engine/variable.c631
-rw-r--r--jam-files/engine/variable.h35
-rw-r--r--jam-files/engine/w32_getreg.c207
-rw-r--r--jam-files/engine/yyacc.c268
-rwxr-xr-xjam-files/test.sh3
-rw-r--r--lm/Jamfile12
-rw-r--r--lm/Makefile.am25
-rw-r--r--lm/binary_format.cc45
-rw-r--r--lm/build_binary.cc53
-rw-r--r--lm/left_test.cc11
-rw-r--r--lm/lm.xcodeproj/project.pbxproj354
-rw-r--r--lm/model_test.cc24
-rw-r--r--lm/ngram_query.cc59
-rw-r--r--lm/search_hashed.cc16
-rw-r--r--lm/search_hashed.hh57
-rw-r--r--lm/search_trie.cc2
-rw-r--r--lm/vocab.cc30
-rw-r--r--lm/vocab.hh30
-rw-r--r--m4/ax_xmlrpc_c.m452
-rw-r--r--m4/boost.m41045
-rw-r--r--mert/BleuScorer.h2
-rw-r--r--mert/Data.cpp10
-rw-r--r--mert/FeatureDataIterator.h6
-rw-r--r--mert/Jamfile44
-rw-r--r--mert/Makefile.am45
-rw-r--r--mert/Optimizer.cpp20
-rw-r--r--mert/Point.cpp12
-rw-r--r--mert/ScoreDataIterator.h4
-rwxr-xr-xmert/normalise.py72
-rw-r--r--mert/test_scorer.py67
-rw-r--r--misc/GenerateTuples.cpp12
-rw-r--r--misc/Jamfile9
-rw-r--r--misc/Makefile.am16
-rw-r--r--misc/queryLexicalTable.cpp2
-rwxr-xr-xmissing337
-rwxr-xr-xmkinstalldirs111
-rw-r--r--moses-chart-cmd/src/IOWrapper.cpp12
-rw-r--r--moses-chart-cmd/src/Jamfile3
-rw-r--r--moses-chart-cmd/src/Main.cpp10
-rw-r--r--moses-chart-cmd/src/Makefile.am10
-rw-r--r--moses-cmd/src/IOWrapper.cpp8
-rw-r--r--[-rwxr-xr-x]moses-cmd/src/IOWrapper.h2
-rw-r--r--moses-cmd/src/Jamfile8
-rw-r--r--moses-cmd/src/LatticeMBR.cpp16
-rw-r--r--moses-cmd/src/LatticeMBRGrid.cpp4
-rw-r--r--moses-cmd/src/Main.cpp2
-rw-r--r--moses-cmd/src/Makefile.am13
-rw-r--r--moses/src/AlignmentInfo.cpp4
-rw-r--r--moses/src/BilingualDynSuffixArray.cpp36
-rw-r--r--moses/src/BilingualDynSuffixArray.h4
-rw-r--r--moses/src/BitmapContainer.cpp16
-rw-r--r--moses/src/ChartCell.cpp6
-rw-r--r--moses/src/ChartCell.h2
-rw-r--r--moses/src/ChartHypothesis.cpp2
-rw-r--r--moses/src/ChartHypothesisCollection.cpp6
-rw-r--r--moses/src/ChartHypothesisCollection.h4
-rw-r--r--moses/src/ChartManager.cpp4
-rw-r--r--moses/src/ChartRuleLookupManagerMemory.cpp2
-rw-r--r--moses/src/ChartRuleLookupManagerOnDisk.cpp6
-rw-r--r--moses/src/ChartRuleLookupManagerOnDisk.h2
-rw-r--r--moses/src/ChartTranslationOption.cpp2
-rw-r--r--moses/src/ChartTranslationOption.h2
-rw-r--r--moses/src/ChartTranslationOptionCollection.cpp18
-rw-r--r--moses/src/ChartTranslationOptionList.cpp2
-rw-r--r--moses/src/ChartTrellisNode.cpp6
-rw-r--r--moses/src/ChartTrellisPath.cpp2
-rw-r--r--moses/src/ConfusionNet.cpp2
-rw-r--r--moses/src/ConfusionNet.h4
-rw-r--r--moses/src/DecodeGraph.h4
-rw-r--r--moses/src/DecodeStep.h2
-rw-r--r--moses/src/DecodeStepGeneration.cpp2
-rw-r--r--moses/src/DotChartInMemory.h4
-rw-r--r--moses/src/DotChartOnDisk.cpp2
-rw-r--r--moses/src/DotChartOnDisk.h6
-rw-r--r--moses/src/DummyScoreProducers.cpp2
-rw-r--r--moses/src/DummyScoreProducers.h2
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/RandLMCache.h0
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/RandLMFilter.h96
-rw-r--r--moses/src/DynSAInclude/file.cpp6
-rw-r--r--moses/src/DynSAInclude/file.h2
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/hash.h16
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/onlineRLM.h16
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/params.cpp4
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/params.h2
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/perfectHash.h12
-rw-r--r--[-rwxr-xr-x]moses/src/DynSAInclude/quantizer.h8
-rw-r--r--moses/src/DynSAInclude/vocab.cpp4
-rw-r--r--moses/src/DynSAInclude/vocab.h4
-rw-r--r--moses/src/DynSuffixArray.cpp4
-rw-r--r--moses/src/FFState.h2
-rw-r--r--moses/src/FeatureFunction.cpp4
-rw-r--r--moses/src/File.h4
-rw-r--r--moses/src/FloydWarshall.cpp4
-rw-r--r--moses/src/Hypothesis.cpp8
-rw-r--r--moses/src/Hypothesis.h5
-rw-r--r--moses/src/HypothesisStackCubePruning.cpp6
-rw-r--r--moses/src/HypothesisStackNormal.cpp2
-rw-r--r--moses/src/Jamfile17
-rw-r--r--moses/src/LM/Factory.cpp44
-rw-r--r--moses/src/LM/IRST.cpp1
-rw-r--r--moses/src/LM/Implementation.cpp14
-rw-r--r--moses/src/LM/Jamfile84
-rw-r--r--moses/src/LM/Ken.cpp3
-rw-r--r--moses/src/LM/ORLM.cpp2
-rw-r--r--moses/src/LM/ParallelBackoff.cpp60
-rw-r--r--moses/src/LM/ParallelBackoff.h65
-rw-r--r--moses/src/LM/Rand.cpp (renamed from moses/src/LM/RandLM.cpp)52
-rw-r--r--moses/src/LM/Rand.h30
-rw-r--r--moses/src/LM/RandLM.h70
-rw-r--r--moses/src/LM/SRI.cpp7
-rw-r--r--moses/src/LM/SRI.h14
-rw-r--r--moses/src/LM/SingleFactor.cpp1
-rw-r--r--moses/src/LVoc.h4
-rw-r--r--moses/src/LexicalReordering.cpp2
-rw-r--r--moses/src/LexicalReordering.h2
-rw-r--r--moses/src/LexicalReorderingState.cpp8
-rw-r--r--moses/src/LexicalReorderingTable.cpp14
-rw-r--r--moses/src/Makefile.am357
-rw-r--r--moses/src/Manager.cpp20
-rw-r--r--moses/src/PDTAimp.h43
-rw-r--r--moses/src/Phrase.cpp42
-rw-r--r--moses/src/Phrase.h21
-rw-r--r--moses/src/PhraseDictionary.cpp22
-rw-r--r--moses/src/PhraseDictionary.h1
-rw-r--r--moses/src/PhraseDictionaryALSuffixArray.cpp2
-rw-r--r--moses/src/PhraseDictionaryDynSuffixArray.cpp4
-rw-r--r--moses/src/PhraseDictionaryDynSuffixArray.h1
-rw-r--r--moses/src/PhraseDictionaryMemory.cpp8
-rw-r--r--moses/src/PhraseDictionaryMemory.h4
-rw-r--r--moses/src/PhraseDictionaryNodeSCFG.cpp12
-rw-r--r--moses/src/PhraseDictionaryOnDisk.cpp21
-rw-r--r--moses/src/PhraseDictionaryOnDisk.h11
-rw-r--r--moses/src/PhraseDictionarySCFG.cpp8
-rw-r--r--moses/src/PhraseDictionarySCFG.h8
-rw-r--r--moses/src/PhraseDictionaryTree.cpp18
-rw-r--r--moses/src/PhraseDictionaryTreeAdaptor.cpp5
-rw-r--r--moses/src/PhraseDictionaryTreeAdaptor.h8
-rw-r--r--moses/src/PrefixTree.h2
-rw-r--r--moses/src/PrefixTreeMap.cpp20
-rw-r--r--moses/src/PrefixTreeMap.h4
-rw-r--r--moses/src/RuleCube.h4
-rw-r--r--moses/src/RuleCubeItem.cpp8
-rw-r--r--moses/src/RuleTableLoaderCompact.cpp7
-rw-r--r--moses/src/RuleTableLoaderCompact.h1
-rw-r--r--moses/src/RuleTableLoaderStandard.cpp6
-rw-r--r--moses/src/ScoreComponentCollection.h34
-rw-r--r--moses/src/SearchCubePruning.cpp2
-rw-r--r--moses/src/SearchNormal.cpp2
-rw-r--r--moses/src/Sentence.cpp7
-rw-r--r--moses/src/Sentence.h2
-rw-r--r--moses/src/StaticData.cpp34
-rw-r--r--moses/src/SyntacticLanguageModelFiles.h2
-rw-r--r--moses/src/TargetPhrase.cpp18
-rw-r--r--moses/src/TargetPhrase.h4
-rw-r--r--moses/src/TranslationOption.cpp6
-rw-r--r--moses/src/TranslationOptionCollection.cpp8
-rw-r--r--moses/src/TranslationOptionList.h6
-rw-r--r--moses/src/TranslationSystem.cpp2
-rw-r--r--moses/src/TreeInput.cpp6
-rw-r--r--moses/src/TreeInput.h3
-rw-r--r--moses/src/TrellisPath.cpp8
-rw-r--r--moses/src/TypeDef.h30
-rw-r--r--moses/src/Util.h6
-rw-r--r--moses/src/Word.cpp4
-rw-r--r--moses/src/WordLattice.h2
-rw-r--r--moses/src/WordsBitmap.cpp8
-rw-r--r--moses/src/WordsBitmap.h8
-rw-r--r--moses/src/WordsRange.h2
-rw-r--r--moses/src/XmlOption.cpp2
-rwxr-xr-xregenerate-makefiles.sh98
-rw-r--r--regression-testing/Jamfile32
-rw-r--r--scripts/.cvsignore1
-rw-r--r--scripts/Jamfile60
-rw-r--r--scripts/Makefile189
-rw-r--r--scripts/README2
-rwxr-xr-xscripts/check-dependencies.pl41
-rw-r--r--scripts/ems/biconcor/Jamfile3
-rw-r--r--scripts/ems/biconcor/Makefile10
-rw-r--r--scripts/ems/example/config.basic10
-rw-r--r--scripts/ems/example/config.factored8
-rw-r--r--scripts/ems/example/config.hierarchical10
-rw-r--r--scripts/ems/example/config.syntax10
-rw-r--r--scripts/ems/example/config.toy10
-rwxr-xr-xscripts/ems/experiment.perl4
-rwxr-xr-xscripts/ems/support/reuse-weights.perl4
-rwxr-xr-xscripts/recaser/train-recaser.perl92
-rw-r--r--scripts/released-files111
-rw-r--r--scripts/tokenizer/nonbreaking_prefixes/README.txt5
-rw-r--r--scripts/training/Jamfile14
-rw-r--r--scripts/training/compact-rule-table/Compactify.cpp (renamed from scripts/training/compact-rule-table/tools/Compactify.cpp)0
-rw-r--r--scripts/training/compact-rule-table/Compactify.h (renamed from scripts/training/compact-rule-table/tools/Compactify.h)0
-rw-r--r--scripts/training/compact-rule-table/Compactify_Main.cpp (renamed from scripts/training/compact-rule-table/tools/Compactify_Main.cpp)0
-rw-r--r--scripts/training/compact-rule-table/Exception.h (renamed from scripts/training/compact-rule-table/tools/Exception.h)0
-rw-r--r--scripts/training/compact-rule-table/Jamfile3
-rw-r--r--scripts/training/compact-rule-table/Makefile.am3
-rw-r--r--scripts/training/compact-rule-table/NumberedSet.h (renamed from scripts/training/compact-rule-table/tools/NumberedSet.h)0
-rw-r--r--scripts/training/compact-rule-table/Options.h (renamed from scripts/training/compact-rule-table/tools/Options.h)0
-rw-r--r--scripts/training/compact-rule-table/RuleTableParser.cpp (renamed from scripts/training/compact-rule-table/tools/RuleTableParser.cpp)0
-rw-r--r--scripts/training/compact-rule-table/RuleTableParser.h (renamed from scripts/training/compact-rule-table/tools/RuleTableParser.h)0
-rw-r--r--scripts/training/compact-rule-table/Tool.h (renamed from scripts/training/compact-rule-table/tools/Tool.h)0
-rw-r--r--scripts/training/compact-rule-table/aclocal.m41056
-rwxr-xr-xscripts/training/compact-rule-table/config.guess1511
-rw-r--r--scripts/training/compact-rule-table/config.h.in83
-rwxr-xr-xscripts/training/compact-rule-table/config.sub1705
-rwxr-xr-xscripts/training/compact-rule-table/configure17728
-rw-r--r--scripts/training/compact-rule-table/configure.ac33
-rwxr-xr-xscripts/training/compact-rule-table/depcomp630
-rwxr-xr-xscripts/training/compact-rule-table/install-sh520
-rwxr-xr-xscripts/training/compact-rule-table/ltmain.sh8406
-rw-r--r--scripts/training/compact-rule-table/m4/boost.m41133
-rw-r--r--scripts/training/compact-rule-table/m4/libtool.m47356
-rw-r--r--scripts/training/compact-rule-table/m4/ltoptions.m4368
-rw-r--r--scripts/training/compact-rule-table/m4/ltsugar.m4123
-rw-r--r--scripts/training/compact-rule-table/m4/ltversion.m423
-rw-r--r--scripts/training/compact-rule-table/m4/lt~obsolete.m492
-rwxr-xr-xscripts/training/compact-rule-table/missing376
-rw-r--r--scripts/training/compact-rule-table/tools/Makefile.am15
-rw-r--r--scripts/training/lexical-reordering/Jamfile3
-rw-r--r--scripts/training/lexical-reordering/Makefile15
-rwxr-xr-xscripts/training/mbr/Makefile14
-rw-r--r--scripts/training/mbr/mbr.cpp398
-rwxr-xr-xscripts/training/mert-moses.pl31
-rw-r--r--scripts/training/phrase-extract/Jamfile26
-rw-r--r--scripts/training/phrase-extract/Makefile37
-rw-r--r--scripts/training/phrase-extract/XmlTree.h1
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Alignment.cpp43
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Alignment.h47
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.cpp562
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.h178
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ComposedRule.cpp129
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ComposedRule.h70
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Exception.h64
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.cpp476
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.h67
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Jamfile3
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Main.cpp26
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Makefile34
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Makefile.dep13
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Node.cpp71
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Node.h182
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Options.h61
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ParseTree.cpp52
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ParseTree.h111
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Rule.h102
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ScfgRule.cpp144
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ScfgRule.h76
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.cpp153
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.h60
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Span.cpp74
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Span.h51
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Subgraph.cpp184
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/Subgraph.h126
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.cpp64
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.h47
-rw-r--r--scripts/training/phrase-extract/extract-ghkm/extract-ghkm.cpp263
-rw-r--r--scripts/training/symal/Jamfile3
-rw-r--r--scripts/training/symal/Makefile11
-rwxr-xr-xscripts/training/train-model.perl.missing_bin_dir (renamed from scripts/training/train-model.perl)17
-rw-r--r--server/Makefile.am5
-rw-r--r--util/Jamfile10
-rw-r--r--util/Makefile.am12
-rw-r--r--util/check.hh21
-rw-r--r--util/file.cc19
-rw-r--r--util/file.hh2
-rw-r--r--util/file_piece.cc10
-rw-r--r--util/file_piece_test.cc41
-rw-r--r--util/getopt.c77
-rw-r--r--[-rwxr-xr-x]util/getopt.hh191
-rw-r--r--util/have.hh18
-rw-r--r--util/mmap.cc11
-rw-r--r--util/portability.cc74
-rw-r--r--util/portability.hh115
-rw-r--r--util/probing_hash_table.hh33
-rw-r--r--util/probing_hash_table_test.cc27
-rw-r--r--util/sorted_uniform.hh93
-rw-r--r--util/sorted_uniform_test.cc84
-rw-r--r--util/tokenize_piece.hh64
-rw-r--r--util/tokenize_piece_test.cc50
-rw-r--r--util/util.xcodeproj/project.pbxproj298
-rwxr-xr-xvalidate_more_revisions.sh40
-rwxr-xr-xvalidate_revision.sh50
757 files changed, 74354 insertions, 51704 deletions
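
A sketch of regenerating a comparable file-level summary from a local clone (hashes from the commit header; merges are usually summarized against their first parent, which is what this compares):

    # Diffstat between the first parent and the merge commit.
    git diff --stat 15091ab70ce249cb1c7b714d13fd5f477ddab3bf \
                    072848a4dfe3fac6c1398684bea46020611f9e3a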
diff --git a/.gitignore b/.gitignore
index a4b810efa..facb831bb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,54 +1,30 @@
-*.[oa]
-*.lo
-.deps
-*.orig
-*.la
-*.Po
+*.so
+*.a
+*.swp
*~
-CreateOnDisk/src/CreateOnDiskPt
-Makefile
-Makefile.in
-aclocal.m4
-autom4te.cache/
-config.h
-config.log
-config.status
-configure
-lm/.deps/
-lm/.libs/
-util/.deps/
-util/.libs/
+dist*
+jam-files/bjam
+jam-files/engine/bootstrap
+jam-files/engine/bin.*
lm/build_binary
lm/query
-libtool
-mert/.deps/
-kenlm/build_binary
-mert/Makefile
-mert/Makefile.in
mert/evaluator
mert/extractor
mert/mert
-misc/.deps/
-mert/.libs/
-misc/Makefile
-misc/Makefile.in
+mert/megam_i686.opt
+mert/pro
misc/processLexicalTable
misc/processPhraseTable
misc/queryLexicalTable
mira/mira
misc/queryPhraseTable
-moses-chart/src/.deps/
moses-chart-cmd/src/moses_chart
-moses-cmd/src/.deps/
-moses-cmd/src/Makefile
-moses-cmd/src/Makefile.in
moses-cmd/src/checkplf
moses-cmd/src/lmbrgrid
moses-cmd/src/moses
-moses/src/.deps/
-moses/src/.libs/
-moses/src/Makefile
-moses/src/Makefile.in
+regression-testing/moses-reg-test-data-*
+regression-testing/tests/mert.extractor-bin/FEATSTAT*
+regression-testing/tests/mert.extractor-bin/SCORESTAT*
scripts/ems/biconcor/biconcor
scripts/release-exclude
scripts/training/cmert-0.5/mert
@@ -62,34 +38,13 @@ scripts/training/phrase-extract/consolidate
scripts/training/phrase-extract/consolidate-direct
scripts/training/phrase-extract/consolidate-reverse
scripts/training/phrase-extract/extract
+scripts/training/phrase-extract/extract-ghkm/tools/extract-ghkm
scripts/training/phrase-extract/extract-lex
scripts/training/phrase-extract/extract-rules
scripts/training/phrase-extract/relax-parse
scripts/training/phrase-extract/score
scripts/training/phrase-extract/statistics
scripts/training/symal/symal
-scripts/release-exclude
-scripts/training/eppex/counter
-scripts/training/eppex/eppex
-scripts/training/lexical-reordering/score
-scripts/training/memscore/memscore
-scripts/training/phrase-extract/consolidate
-scripts/training/phrase-extract/consolidate-direct
-scripts/training/phrase-extract/consolidate-reverse
-scripts/training/phrase-extract/extract-lex
-scripts/training/phrase-extract/extract-rules
-scripts/training/phrase-extract/relax-parse
-scripts/training/phrase-extract/statistics
-stamp-h1
-CreateOnDisk/src/CreateOnDiskPt
-.libs
-kenlm/query
-libtool
-m4/lt*
-misc/queryPhraseTable
-moses-chart-cmd/src/moses_chart
-moses-cmd/src/checkplf
-moses-cmd/src/lmbrgrid
-regression-testing/moses-reg-test-data-*
-unittest/moses_test
-ltmain.sh
+scripts/training/train-model.perl
+dist
+bin
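
The .gitignore hunk above replaces the autotools-era ignore rules with bjam-era ones (dist*, jam-files/bjam, the generated scripts/training/train-model.perl, and so on). As an aside, not part of the commit: with a reasonably recent git, ignore rules like these can be sanity-checked after a build, for example:

    git status --short --ignored          # list ignored files alongside pending changes
    git check-ignore -v dist/bin/moses    # show which rule matches (path is hypothetical)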
diff --git a/BUILD-INSTRUCTIONS.txt b/BUILD-INSTRUCTIONS.txt
index 8000e072a..e5c0ad8ea 100644
--- a/BUILD-INSTRUCTIONS.txt
+++ b/BUILD-INSTRUCTIONS.txt
@@ -1,92 +1,109 @@
-0) Preliminaries
+PRELIMINARIES
-Before building you need to decide what language model toolkit (SRI's,
-IRST's, or Ken's) you want to use.
-
-If you want to use SRI's, you will need to download its source
-and build it. The SRILM can be downloaded from
-http://www.speech.sri.com/projects/srilm/download.html .
+Moses is primarily targeted at gcc on UNIX.
+Moses requires gcc, Boost >= 1.36, and zlib including the headers that some
+distributions package separately (i.e. -dev or -devel packages). Source is
+available at http://boost.org .
-If you want to use IRST's, you will need to download its source and
-build it. The IRSTLM can be downloaded from either the SourceForge
-website
-http://sourceforge.net/projects/irstlm
-or the official IRSTLM website
-http://hlt.fbk.eu/en/irstlm
+There are several optional dependencies:
+GIZA++ from http://code.google.com/p/giza-pp/ is used to build phrase tables.
-Ken's LM is included with the Moses distribution.
+Moses server requires xmlrpc-c with abyss-server. Source is available from
+http://xmlrpc-c.sourceforge.net/.
-IMPORTANT: These instructions are for building the moses decoder ONLY,
-the training and tuning SCRIPTS contained in scripts/ must be built
-and installed separately. Also, they may require modification to
-work in certain environments.
+The scripts support building ARPA format language models with SRILM or IRSTLM.
+To apply models inside the decoder, you can use SRILM, IRSTLM, or KenLM. The
+ARPA format is exchangable so that e.g. you can build a model with SRILM and
+run the decoder with IRSTLM or KenLM.
-Versions 1.9 (or higher) of aclocal and automake are required.
+If you want to use SRILM, you will need to download its source and build it.
+The SRILM can be downloaded from
+http://www.speech.sri.com/projects/srilm/download.html .
+On x86_64, the default machine type is broken. Edit sbin/machine-type, find
+this code
+ else if (`uname -m` == x86_64) then
+ set MACHINE_TYPE = i686
+and change it to
+ else if (`uname -m` == x86_64) then
+ set MACHINE_TYPE = i686-m64
+You may have to chmod +w sbin/machine-type first.
+
+If you want to use IRSTLM, you will need to download its source and build it.
+The IRSTLM can be downloaded from either the SourceForge website
+http://sourceforge.net/projects/irstlm
+or the official IRSTLM website
+http://hlt.fbk.eu/en/irstlm
-For Mac OSX users:
-Standard distribution usually includes versions 1.6.
-Get correct versions, and set the environment variables ACLOCAL and
-AUTOMAKE to the correct paths in ./regenerate-makefiles.sh.
+KenLM is included with Moses.
--------------------------------------------------------------------------
-1) Instructions for building with SRILM
-
-Build SRILM according to their release instructions. Make sure that
-you DO NOT override the MACHINE_TYPE variable on the command line when
-you do so, as this can lead to problems locating the library.
+ADVICE ON INSTALLING EXTERNAL LIBRARIES
- ./regenerate-makefiles.sh
- ./configure [--with-srilm=/path/to/srilm] [--with-irstlm=/path/to/irstlm]
+Generally, for trouble installing external libraries, you should get support
+directly from the library maker:
-The resulting decoder binary will be moses-cmd/src/moses .
+Boost: http://www.boost.org/doc/libs/1_48_0/more/getting_started/unix-variants.html
+IRSTLM: https://list.fbk.eu/sympa/subscribe/user-irstlm
+SRILM: http://www.speech.sri.com/projects/srilm/#srilm-user
---------------------------------------------------------------------------
+However, here's some general advice on installing software (for bash users):
-2) Instructions for building with IRSTLM
+#Determine where you want to install packages
+PREFIX=$HOME/usr
+#If your system has lib64 directories, lib64 should be used AND NOT lib
+if [ -d /lib64 ]; then
+ LIBDIR=$PREFIX/lib64
+else
+ LIBDIR=$PREFIX/lib
+fi
+#If you're installing to a non-standard path, tell programs where to find things:
+export PATH=$PREFIX/bin${PATH:+:$PATH}
+export LD_LIBRARY_PATH=$LIBDIR${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}
+export LIBRARY_PATH=$LIBDIR${LIBRARY_PATH:+:$LIBRARY_PATH}
+export CPATH=$PREFIX/include${CPATH:+:$CPATH}
-Build IRSTLM according to its release instructions. Make sure that
-you DO NOT override the MACHINE_TYPE variable on the command line when
-you do so, as this can lead to problems locating the library.
+Add all the above code to your .bashrc or .bash_login as appropriate. Then
+you're ready to install packages in non-standard paths:
- ./regenerate-makefiles.sh
- ./configure [--with-srilm=/path/to/srilm] [--with-irstlm=/path/to/irstlm]
+#For autotools packages e.g. xmlrpc-c
+./configure --prefix=$PREFIX --libdir=$PREFIX/lib64 [other options here]
-The resulting decoder binary will be moses-cmd/src/moses .
+#For Boost:
+./bootstrap.sh
+./b2 --prefix=$PREFIX --libdir=$PREFIX/lib64 link=static,shared threading=multi install
--------------------------------------------------------------------------
-3) Instructions for building with Ken's LM
+BUILDING
- ./regenerate-makefiles.sh
- ./configure
+Building consists of running
+ ./bjam [options]
-The resulting decoder binary will be moses-cmd/src/moses .
-Use language model 8 in your moses.ini:
-[lmodel-file]
-8 0 2 /path/to/lm.arpa
+Common options are:
+--with-srilm=/path/to/srilm to compile the decoder with SRILM support
+--with-irstlm=/path/to/irstlm to compile the decoder with IRSTLM support
+--with-giza=/path/to/giza to enable training scripts
+-jN where N is the number of CPUs
---------------------------------------------------------------------------
+Binaries will appear in dist/bin.
+For further documentation, run
+ ./bjam --help
+--------------------------------------------------------------------------
ALTERNATIVE WAYS TO BUILD ON UNIX AND OTHER PLATFORMS
Microsoft Windows
-----------------
-Tested on 32-bit Windows XP and Vista using Visual Studio 2005.
-Again, refer to the old manual
- http://homepages.inf.ed.ac.uk/s0565741/papers/developers-manual.pdf
-The Windows build doesn't use the SRI or IRST language model libraries as they can't be compiled
-under Windows using Visual Studio. Instead, an internal language model, which behave like SRILM is used,
-however, it can only handle up to trigrams.
+Moses is primarily targeted at gcc on UNIX. Windows users should consult
+http://ssli.ee.washington.edu/people/amittai/Moses-on-Win7.pdf .
Binaries for all external libraries needed can be downloaded from
http://www.statmt.org/moses/?n=Moses.LibrariesUsed
-Only the decoder is developed and tested under Windows. There are difficulties using the training scripts under Windows, even with Cygwin.
-
-
-
+Only the decoder is developed and tested under Windows. There are difficulties
+using the training scripts under Windows, even with Cygwin.
diff --git a/CLEANUP-AFTER-AUTOTOOLS.sh b/CLEANUP-AFTER-AUTOTOOLS.sh
new file mode 100755
index 000000000..ff76e155b
--- /dev/null
+++ b/CLEANUP-AFTER-AUTOTOOLS.sh
@@ -0,0 +1,337 @@
+#!/bin/bash
+#Moses used to be built with autotools. This script helps people migrate to bjam by deleting the files that autotools generated. Run it in the top-level directory.
+while read a; do
+ rm -rf "$a"
+done <<EOF
+Makefile
+Makefile.in
+OnDiskPt/src/
+aclocal.m4
+autom4te.cache/
+config.guess
+config.h
+config.log
+config.status
+config.sub
+configure
+depcomp
+install-sh
+libtool
+lm/.deps/
+lm/.libs/
+lm/Makefile
+lm/Makefile.in
+lm/bhiksha.lo
+lm/bhiksha.o
+lm/binary_format.lo
+lm/binary_format.o
+lm/build_binary.o
+lm/config.lo
+lm/config.o
+lm/libkenlm.la
+lm/lm_exception.lo
+lm/lm_exception.o
+lm/model.lo
+lm/model.o
+lm/ngram_query.o
+lm/quantize.lo
+lm/quantize.o
+lm/read_arpa.lo
+lm/read_arpa.o
+lm/search_hashed.lo
+lm/search_hashed.o
+lm/search_trie.lo
+lm/search_trie.o
+lm/trie.lo
+lm/trie.o
+lm/trie_sort.lo
+lm/trie_sort.o
+lm/virtual_interface.lo
+lm/virtual_interface.o
+lm/vocab.lo
+lm/vocab.o
+ltmain.sh
+m4/
+mert/.deps/
+mert/Makefile
+mert/Makefile.in
+misc/.deps/
+misc/Makefile
+misc/Makefile.in
+missing
+moses-chart-cmd/src/.deps/
+moses-chart-cmd/src/Makefile
+moses-chart-cmd/src/Makefile.in
+moses-cmd/src/.deps/
+moses-cmd/src/Makefile
+moses-cmd/src/Makefile.in
+moses/src/.deps/
+moses/src/AlignmentInfo.lo
+moses/src/AlignmentInfo.o
+moses/src/AlignmentInfoCollection.lo
+moses/src/AlignmentInfoCollection.o
+moses/src/Base.lo
+moses/src/Base.o
+moses/src/BilingualDynSuffixArray.lo
+moses/src/BilingualDynSuffixArray.o
+moses/src/BitmapContainer.lo
+moses/src/BitmapContainer.o
+moses/src/ChartCell.lo
+moses/src/ChartCell.o
+moses/src/ChartCellCollection.lo
+moses/src/ChartCellCollection.o
+moses/src/ChartHypothesis.lo
+moses/src/ChartHypothesis.o
+moses/src/ChartHypothesisCollection.lo
+moses/src/ChartHypothesisCollection.o
+moses/src/ChartManager.lo
+moses/src/ChartManager.o
+moses/src/ChartRuleLookupManager.lo
+moses/src/ChartRuleLookupManager.o
+moses/src/ChartRuleLookupManagerMemory.lo
+moses/src/ChartRuleLookupManagerMemory.o
+moses/src/ChartRuleLookupManagerOnDisk.lo
+moses/src/ChartRuleLookupManagerOnDisk.o
+moses/src/ChartTranslationOption.lo
+moses/src/ChartTranslationOption.o
+moses/src/ChartTranslationOptionCollection.lo
+moses/src/ChartTranslationOptionCollection.o
+moses/src/ChartTranslationOptionList.lo
+moses/src/ChartTranslationOptionList.o
+moses/src/ChartTrellisDetour.lo
+moses/src/ChartTrellisDetour.o
+moses/src/ChartTrellisDetourQueue.lo
+moses/src/ChartTrellisDetourQueue.o
+moses/src/ChartTrellisNode.lo
+moses/src/ChartTrellisNode.o
+moses/src/ChartTrellisPath.lo
+moses/src/ChartTrellisPath.o
+moses/src/ConfusionNet.lo
+moses/src/ConfusionNet.o
+moses/src/DecodeFeature.lo
+moses/src/DecodeFeature.o
+moses/src/DecodeGraph.lo
+moses/src/DecodeGraph.o
+moses/src/DecodeStep.lo
+moses/src/DecodeStep.o
+moses/src/DecodeStepGeneration.lo
+moses/src/DecodeStepGeneration.o
+moses/src/DecodeStepTranslation.lo
+moses/src/DecodeStepTranslation.o
+moses/src/Dictionary.lo
+moses/src/Dictionary.o
+moses/src/DotChart.lo
+moses/src/DotChart.o
+moses/src/DotChartInMemory.lo
+moses/src/DotChartInMemory.o
+moses/src/DotChartOnDisk.lo
+moses/src/DotChartOnDisk.o
+moses/src/DummyScoreProducers.lo
+moses/src/DummyScoreProducers.o
+moses/src/DynSuffixArray.lo
+moses/src/DynSuffixArray.o
+moses/src/FFState.lo
+moses/src/FFState.o
+moses/src/Factor.lo
+moses/src/Factor.o
+moses/src/FactorCollection.lo
+moses/src/FactorCollection.o
+moses/src/FactorTypeSet.lo
+moses/src/FactorTypeSet.o
+moses/src/Factory.lo
+moses/src/Factory.o
+moses/src/FeatureFunction.lo
+moses/src/FeatureFunction.o
+moses/src/FloydWarshall.lo
+moses/src/FloydWarshall.o
+moses/src/GenerationDictionary.lo
+moses/src/GenerationDictionary.o
+moses/src/GlobalLexicalModel.lo
+moses/src/GlobalLexicalModel.o
+moses/src/Hypothesis.lo
+moses/src/Hypothesis.o
+moses/src/HypothesisStack.lo
+moses/src/HypothesisStack.o
+moses/src/HypothesisStackCubePruning.lo
+moses/src/HypothesisStackCubePruning.o
+moses/src/HypothesisStackNormal.lo
+moses/src/HypothesisStackNormal.o
+moses/src/Implementation.lo
+moses/src/Implementation.o
+moses/src/InputFileStream.lo
+moses/src/InputFileStream.o
+moses/src/InputType.lo
+moses/src/InputType.o
+moses/src/Joint.lo
+moses/src/Joint.o
+moses/src/Ken.lo
+moses/src/Ken.o
+moses/src/LMList.lo
+moses/src/LMList.o
+moses/src/LVoc.lo
+moses/src/LVoc.o
+moses/src/LexicalReordering.lo
+moses/src/LexicalReordering.o
+moses/src/LexicalReorderingState.lo
+moses/src/LexicalReorderingState.o
+moses/src/LexicalReorderingTable.lo
+moses/src/LexicalReorderingTable.o
+moses/src/Makefile
+moses/src/Makefile.in
+moses/src/Manager.lo
+moses/src/Manager.o
+moses/src/MultiFactor.lo
+moses/src/MultiFactor.o
+moses/src/PCNTools.lo
+moses/src/PCNTools.o
+moses/src/Parameter.lo
+moses/src/Parameter.o
+moses/src/PartialTranslOptColl.lo
+moses/src/PartialTranslOptColl.o
+moses/src/Phrase.lo
+moses/src/Phrase.o
+moses/src/PhraseDictionary.lo
+moses/src/PhraseDictionary.o
+moses/src/PhraseDictionaryALSuffixArray.lo
+moses/src/PhraseDictionaryALSuffixArray.o
+moses/src/PhraseDictionaryDynSuffixArray.lo
+moses/src/PhraseDictionaryDynSuffixArray.o
+moses/src/PhraseDictionaryHiero.lo
+moses/src/PhraseDictionaryHiero.o
+moses/src/PhraseDictionaryMemory.lo
+moses/src/PhraseDictionaryMemory.o
+moses/src/PhraseDictionaryNode.lo
+moses/src/PhraseDictionaryNode.o
+moses/src/PhraseDictionaryNodeSCFG.lo
+moses/src/PhraseDictionaryNodeSCFG.o
+moses/src/PhraseDictionaryOnDisk.lo
+moses/src/PhraseDictionaryOnDisk.o
+moses/src/PhraseDictionarySCFG.lo
+moses/src/PhraseDictionarySCFG.o
+moses/src/PhraseDictionaryTree.lo
+moses/src/PhraseDictionaryTree.o
+moses/src/PhraseDictionaryTreeAdaptor.lo
+moses/src/PhraseDictionaryTreeAdaptor.o
+moses/src/PrefixTreeMap.lo
+moses/src/PrefixTreeMap.o
+moses/src/Remote.lo
+moses/src/Remote.o
+moses/src/ReorderingConstraint.lo
+moses/src/ReorderingConstraint.o
+moses/src/ReorderingStack.lo
+moses/src/ReorderingStack.o
+moses/src/RuleCube.lo
+moses/src/RuleCube.o
+moses/src/RuleCubeItem.lo
+moses/src/RuleCubeItem.o
+moses/src/RuleCubeQueue.lo
+moses/src/RuleCubeQueue.o
+moses/src/RuleTableLoaderCompact.lo
+moses/src/RuleTableLoaderCompact.o
+moses/src/RuleTableLoaderFactory.lo
+moses/src/RuleTableLoaderFactory.o
+moses/src/RuleTableLoaderHiero.lo
+moses/src/RuleTableLoaderHiero.o
+moses/src/RuleTableLoaderStandard.lo
+moses/src/RuleTableLoaderStandard.o
+moses/src/ScoreComponentCollection.lo
+moses/src/ScoreComponentCollection.o
+moses/src/ScoreIndexManager.lo
+moses/src/ScoreIndexManager.o
+moses/src/ScoreProducer.lo
+moses/src/ScoreProducer.o
+moses/src/Search.lo
+moses/src/Search.o
+moses/src/SearchCubePruning.lo
+moses/src/SearchCubePruning.o
+moses/src/SearchNormal.lo
+moses/src/SearchNormal.o
+moses/src/Sentence.lo
+moses/src/Sentence.o
+moses/src/SentenceStats.lo
+moses/src/SentenceStats.o
+moses/src/SingleFactor.lo
+moses/src/SingleFactor.o
+moses/src/SquareMatrix.lo
+moses/src/SquareMatrix.o
+moses/src/TargetPhrase.lo
+moses/src/TargetPhrase.o
+moses/src/TargetPhraseCollection.lo
+moses/src/TargetPhraseCollection.o
+moses/src/ThreadPool.lo
+moses/src/ThreadPool.o
+moses/src/Timer.lo
+moses/src/Timer.o
+moses/src/TranslationOption.lo
+moses/src/TranslationOption.o
+moses/src/TranslationOptionCollection.lo
+moses/src/TranslationOptionCollection.o
+moses/src/TranslationOptionCollectionConfusionNet.lo
+moses/src/TranslationOptionCollectionConfusionNet.o
+moses/src/TranslationOptionCollectionText.lo
+moses/src/TranslationOptionCollectionText.o
+moses/src/TranslationOptionList.lo
+moses/src/TranslationOptionList.o
+moses/src/TranslationSystem.lo
+moses/src/TranslationSystem.o
+moses/src/file.lo
+moses/src/file.o
+moses/src/hash.lo
+moses/src/hash.o
+moses/src/vocab.lo
+moses/src/vocab.o
+scripts/ems/biconcor/Alignment.o
+scripts/ems/biconcor/Mismatch.o
+scripts/ems/biconcor/PhrasePair.o
+scripts/ems/biconcor/PhrasePairCollection.o
+scripts/ems/biconcor/SuffixArray.o
+scripts/ems/biconcor/TargetCorpus.o
+scripts/ems/biconcor/Vocabulary.o
+scripts/ems/biconcor/base64.o
+scripts/ems/biconcor/biconcor.o
+scripts/training/compact-rule-table/config.log
+scripts/training/compact-rule-table/config.status
+scripts/training/eppex/
+scripts/training/lexical-reordering/reordering_classes.o
+scripts/training/memscore/
+scripts/training/phrase-extract/AlignmentPhrase.o
+scripts/training/phrase-extract/ExtractedRule.o
+scripts/training/phrase-extract/HoleCollection.o
+scripts/training/phrase-extract/InputFileStream.o
+scripts/training/phrase-extract/PhraseAlignment.o
+scripts/training/phrase-extract/SentenceAlignment.o
+scripts/training/phrase-extract/SentenceAlignmentWithSyntax.o
+scripts/training/phrase-extract/SyntaxTree.o
+scripts/training/phrase-extract/XmlTree.o
+scripts/training/phrase-extract/consolidate-direct.o
+scripts/training/phrase-extract/consolidate-reverse.o
+scripts/training/phrase-extract/consolidate.o
+scripts/training/phrase-extract/extract-lex.o
+scripts/training/phrase-extract/extract-rules.o
+scripts/training/phrase-extract/extract.o
+scripts/training/phrase-extract/relax-parse.o
+scripts/training/phrase-extract/score.o
+scripts/training/phrase-extract/statistics.o
+scripts/training/phrase-extract/tables-core.o
+scripts/training/symal/cmd.o
+stamp-h1
+util/.deps/
+util/.libs/
+util/Makefile
+util/Makefile.in
+util/bit_packing.lo
+util/bit_packing.o
+util/ersatz_progress.lo
+util/ersatz_progress.o
+util/exception.lo
+util/exception.o
+util/file.lo
+util/file.o
+util/file_piece.lo
+util/file_piece.o
+util/libkenutil.la
+util/mmap.lo
+util/mmap.o
+util/murmur_hash.lo
+util/murmur_hash.o
+EOF
diff --git a/CreateOnDisk/src/Makefile.am b/CreateOnDisk/src/Makefile.am
deleted file mode 100644
index f8c99a741..000000000
--- a/CreateOnDisk/src/Makefile.am
+++ /dev/null
@@ -1,6 +0,0 @@
-bin_PROGRAMS = CreateOnDiskPt
-CreateOnDiskPt_SOURCES = Main.cpp
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES -DUSE_HYPO_POOL -I$(top_srcdir)/moses/src $(BOOST_CPPFLAGS)
-
-CreateOnDiskPt_LDADD = -L$(top_srcdir)/OnDiskPt/src -L$(top_srcdir)/moses/src -lOnDiskPt -lmoses $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-CreateOnDiskPt_DEPENDENCIES = $(top_srcdir)/OnDiskPt/src/libOnDiskPt.a $(top_srcdir)/moses/src/libmoses.la
diff --git a/Jamroot b/Jamroot
new file mode 100644
index 000000000..a6b26d5ec
--- /dev/null
+++ b/Jamroot
@@ -0,0 +1,208 @@
+#BUILDING MOSES
+#
+#PACKAGES
+#Language models (optional):
+#--with-irstlm=/path/to/irstlm
+#--with-srilm=/path/to/srilm (see moses/src/LM/Jamfile for more options)
+#--with-randlm=/path/to/randlm
+#KenLM is always compiled.
+#
+#--with-boost=/path/to/boost
+#If Boost is in a non-standard location, specify it here. This directory is
+#expected to contain include and lib or lib64.
+#
+#--with-xmlrpc-c=/path/to/xmlrpc-c for libxmlrpc-c (used by server)
+#Note that, like language models, this is the --prefix where the library was
+#installed, not some executable within the library.
+#
+#--with-giza=/path/to/giza
+#Indicates where the GIZA++, snt2cooc.out, and mkcls binaries live.
+#Builds scripts/training/train-model.perl using these paths.
+#
+#
+#REGRESSION TESTING
+#--with-regtest=/path/to/moses-reg-test-data
+#
+#
+#INSTALLATION
+#--prefix=/path/to/prefix sets the install prefix [dist].
+#--bindir=/path/to/prefix/bin sets the bin directory [PREFIX/bin]
+#--libdir=/path/to/prefix/lib sets the lib directory [PREFIX/lib]
+#--install-scripts=/path/to/scripts copies scripts into a directory.
+#--git appends the git revision to the prefix directory.
+#
+#
+#BUILD OPTIONS
+# By default, the build is multi-threaded, optimized, and statically linked.
+# Pass these to change the build:
+#
+# threading=single|multi controls threading (default multi)
+#
+# variant=release|debug|profile builds optimized (default), for debug, or for
+# profiling
+#
+# link=static|shared controls linking (default static)
+#
+# debug-symbols=on|off include (default) or exclude debugging
+# information (i.e. the -g flag)
+#
+# --notrace compiles without TRACE macros
+#
+# --enable-boost-pool uses Boost pools for the memory SCFG table
+#
+#
+#CONTROLLING THE BUILD
+#-a to build from scratch
+#-j$NCPUS to compile in parallel
+#--clean to clean
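+#
+#EXAMPLE
+#An illustrative invocation (not the only valid one) combining the options above:
+#  ./bjam --with-boost=/path/to/boost variant=debug link=shared -j4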
+
+import option ;
+import modules ;
+
+path-constant TOP : . ;
+
+# Shell command with the trailing newline removed; see http://lists.boost.org/boost-build/2007/08/17051.php
+rule trim-nl ( str extras * ) {
+ return [ MATCH "([^
+]*)" : $(str) ] $(extras) ;
+}
+rule _shell ( cmd : extras * ) {
+ return [ trim-nl [ SHELL $(cmd) : $(extras) ] ] ;
+}
+
+local cleaning = [ option.get "clean" : : yes ] ;
+cleaning ?= [ option.get "clean-all" : no : yes ] ;
+if "clean" in [ modules.peek : ARGV ] {
+ cleaning = yes ;
+}
+constant CLEANING : $(cleaning) ;
+
+#Run g++ with an empty main and these arguments to see if it passes.
+rule test_flags ( flags ) {
+ if [ SHELL $(TOP)"/jam-files/test.sh "$(flags) ] = 0 {
+ return true ;
+ } else {
+ return ;
+ }
+}
+
+#Determine whether a library can be linked statically; if not, request shared linking.
+rule auto_shared ( name : additional ? ) {
+ additional ?= "" ;
+ if [ test_flags $(additional)" -static -l"$(name) ] {
+ return ;
+ } else {
+ return "<link>shared" ;
+ }
+}
+
+with-boost = [ option.get "with-boost" ] ;
+if $(with-boost) {
+ L-boost-search = -L$(with-boost)/lib" "-L$(with-boost)/lib64 ;
+ boost-search = <search>$(with-boost)/lib <search>$(with-boost)/lib64 ;
+ I-boost-include = -I$(with-boost)/include ;
+ boost-include = <include>$(with-boost)/include ;
+} else {
+ L-boost-search = "" ;
+ boost-search = ;
+ I-boost-include = "" ;
+ boost-include = ;
+}
+
+boost-shell = [ SHELL "g++ "$(I-boost-include)" -dM -x c++ -E /dev/null -include boost/version.hpp 2>/dev/null |grep '#define BOOST_VERSION '" : exit-status ] ;
+if $(boost-shell[2]) != 0 && $(CLEANING) = no {
+ exit Boost does not seem to be installed or g++ is confused. : 1 ;
+}
+boost-version = [ MATCH "#define BOOST_VERSION ([0-9]*)" : $(boost-shell[1]) ] ;
+if $(boost-version) < 103600 && $(cleaning) = no {
+ exit You have Boost $(boost-version). Moses requires at least 103600 (and preferably newer). : 1 ;
+}
+#Are we linking static binaries against shared boost?
+boost-auto-shared = [ auto_shared "boost_program_options" : $(L-boost-search) ] ;
+#Convenience rule for boost libraries. Defines library boost_$(name).
+rule boost_lib ( name macro ) {
+ #Link multi-threaded programs against the -mt version if available. Old
+ #versions of boost do not have -mt tagged versions of all libraries. Sadly,
+ #boost.jam does not handle this correctly.
+ if [ test_flags $(L-boost-search)" -lboost_"$(name)"-mt" ] {
+ lib inner_boost_$(name) : : <threading>single $(boost-search) <name>boost_$(name) ;
+ lib inner_boost_$(name) : : <threading>multi $(boost-search) <name>boost_$(name)-mt ;
+ } else {
+ lib inner_boost_$(name) : : $(boost-search) <name>boost_$(name) ;
+ }
+
+ alias boost_$(name) : inner_boost_$(name) : $(boost-auto-shared) : : <link>shared:<define>BOOST_$(macro) $(boost-include) ;
+}
+#See tools/build/v2/contrib/boost.jam in a boost distribution for a table of macros to define.
+boost_lib thread THREAD_DYN_DLL ;
+boost_lib program_options PROGRAM_OPTIONS_DYN_LINK ;
+boost_lib unit_test_framework TEST_DYN_LINK ;
+
+#Link against a library normally, but fall back to dynamic linking when a static version isn't installed.
+rule external_lib ( name ) {
+ lib $(name) : : [ auto_shared $(name) ] ;
+}
+
+external_lib z ;
+
+requirements = ;
+
+#libSegFault prints a stack trace on segfault. Link against it if available.
+if [ test_flags "-lSegfault" ] {
+ external_lib SegFault ;
+ requirements += <library>SegFault ;
+}
+
+requirements += [ option.get "notrace" : <define>TRACE_ENABLE=1 ] ;
+requirements += [ option.get "enable-boost-pool" : : <define>USE_BOOST_POOL ] ;
+
+import os ;
+
+cxxflags = [ os.environ "CXXFLAGS" ] ;
+cflags = [ os.environ "CFLAGS" ] ;
+ldflags = [ os.environ "LDFLAGS" ] ;
+
+project : default-build
+ <threading>multi
+ <warnings>on
+ <debug-symbols>on
+ <variant>release
+ <link>static
+ ;
+
+project : requirements
+ <threading>multi:<define>WITH_THREADS
+ <threading>multi:<library>boost_thread
+ <define>_FILE_OFFSET_BITS=64 <define>_LARGE_FILES
+ $(requirements)
+ <cxxflags>$(cxxflags)
+ <cflags>$(cflags)
+ <linkflags>$(ldflags)
+ ;
+
+#Add directories here if you want their incidental targets too (i.e. tests).
+build-project lm ;
+build-project util ;
+#Trigger installation into legacy paths.
+build-project mert ;
+build-project moses-cmd/src ;
+build-project moses-chart-cmd/src ;
+#Scripts have their own binaries.
+build-project scripts ;
+#Regression tests (only does anything if --with-regtest is passed)
+build-project regression-testing ;
+
+if [ option.get "git" : : "yes" ] {
+ local revision = [ _shell "git rev-parse --verify HEAD |head -c 7" ] ;
+ constant GITTAG : "/"$(revision) ;
+} else {
+ constant GITTAG : "" ;
+}
+
+alias programs : lm//query lm//build_binary moses-chart-cmd/src//moses_chart moses-cmd/src//programs OnDiskPt//CreateOnDisk mert//programs contrib/server//mosesserver misc//programs ;
+
+prefix = [ option.get "prefix" : $(TOP)/dist$(GITTAG) ] ;
+bindir = [ option.get "bindir" : $(prefix)/bin ] ;
+libdir = [ option.get "libdir" : $(prefix)/lib ] ;
+install prefix-bin : programs : <location>$(bindir) <install-dependencies>on <install-type>EXE <link>shared:<dll-path>$(libdir) ;
+install prefix-lib : programs : <location>$(libdir) <install-dependencies>on <install-type>LIB <link>shared:<dll-path>$(libdir) ;
diff --git a/Makefile.am b/Makefile.am
deleted file mode 100644
index c768f2eac..000000000
--- a/Makefile.am
+++ /dev/null
@@ -1,17 +0,0 @@
-# not a GNU package. You can remove this line, if
-# have all needed files, that a GNU package needs
-AUTOMAKE_OPTIONS = foreign
-
-ACLOCAL_AMFLAGS = -I m4
-
-# order is important here: build moses before moses-cmd
-if WITH_MERT
- MERT = mert
-endif
-if WITH_SERVER
- SERVER = server
-endif
-if WITH_UNITTEST
- UNITTEST = unittest
-endif
-SUBDIRS = util lm moses/src OnDiskPt/src moses-cmd/src misc moses-chart-cmd/src CreateOnDisk/src mira $(MERT) $(SERVER) $(UNITTEST)
diff --git a/OnDiskPt/Jamfile b/OnDiskPt/Jamfile
new file mode 100644
index 000000000..f9811c05b
--- /dev/null
+++ b/OnDiskPt/Jamfile
@@ -0,0 +1,2 @@
+lib OnDiskPt : OnDiskWrapper.cpp SourcePhrase.cpp TargetPhrase.cpp Word.cpp Phrase.cpp PhraseNode.cpp TargetPhraseCollection.cpp Vocab.cpp ../moses/src//headers ;
+exe CreateOnDisk : Main.cpp ../moses/src//moses OnDiskPt ;
diff --git a/CreateOnDisk/src/Main.cpp b/OnDiskPt/Main.cpp
index 74af77f46..d8899def5 100644
--- a/CreateOnDisk/src/Main.cpp
+++ b/OnDiskPt/Main.cpp
@@ -23,15 +23,15 @@
#include <string>
#include <vector>
#include <iterator>
-#include "../../moses/src/InputFileStream.h"
-#include "../../moses/src/Util.h"
-#include "../../moses/src/UserMessage.h"
-#include "../../OnDiskPt/src/OnDiskWrapper.h"
-#include "../../OnDiskPt/src/SourcePhrase.h"
-#include "../../OnDiskPt/src/TargetPhrase.h"
-#include "../../OnDiskPt/src/TargetPhraseCollection.h"
-#include "../../OnDiskPt/src/Word.h"
-#include "../../OnDiskPt/src/Vocab.h"
+#include "../moses/src/InputFileStream.h"
+#include "../moses/src/Util.h"
+#include "../moses/src/UserMessage.h"
+#include "../OnDiskPt/OnDiskWrapper.h"
+#include "../OnDiskPt/SourcePhrase.h"
+#include "../OnDiskPt/TargetPhrase.h"
+#include "../OnDiskPt/TargetPhraseCollection.h"
+#include "../OnDiskPt/Word.h"
+#include "../OnDiskPt/Vocab.h"
#include "Main.h"
using namespace std;
diff --git a/CreateOnDisk/src/Main.h b/OnDiskPt/Main.h
index cd677f596..41a24a239 100644
--- a/CreateOnDisk/src/Main.h
+++ b/OnDiskPt/Main.h
@@ -19,8 +19,8 @@
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
#include <string>
-#include "../../OnDiskPt/src/SourcePhrase.h"
-#include "../../OnDiskPt/src/TargetPhrase.h"
+#include "../OnDiskPt/SourcePhrase.h"
+#include "../OnDiskPt/TargetPhrase.h"
typedef std::pair<size_t, size_t> AlignPair;
typedef std::vector<AlignPair> AlignType;
diff --git a/OnDiskPt/src/OnDiskWrapper.cpp b/OnDiskPt/OnDiskWrapper.cpp
index 276f41024..79b0563a8 100644
--- a/OnDiskPt/src/OnDiskWrapper.cpp
+++ b/OnDiskPt/OnDiskWrapper.cpp
@@ -21,7 +21,7 @@
#include <direct.h>
#endif
#include <sys/stat.h>
-#include <cassert>
+#include "util/check.hh"
#include <string>
#include "OnDiskWrapper.h"
@@ -56,19 +56,19 @@ bool OnDiskWrapper::BeginLoad(const std::string &filePath)
bool OnDiskWrapper::OpenForLoad(const std::string &filePath)
{
m_fileSource.open((filePath + "/Source.dat").c_str(), ios::in | ios::binary);
- assert(m_fileSource.is_open());
+ CHECK(m_fileSource.is_open());
m_fileTargetInd.open((filePath + "/TargetInd.dat").c_str(), ios::in | ios::binary);
- assert(m_fileTargetInd.is_open());
+ CHECK(m_fileTargetInd.is_open());
m_fileTargetColl.open((filePath + "/TargetColl.dat").c_str(), ios::in | ios::binary);
- assert(m_fileTargetColl.is_open());
+ CHECK(m_fileTargetColl.is_open());
m_fileVocab.open((filePath + "/Vocab.dat").c_str(), ios::in);
- assert(m_fileVocab.is_open());
+ CHECK(m_fileVocab.is_open());
m_fileMisc.open((filePath + "/Misc.dat").c_str(), ios::in);
- assert(m_fileMisc.is_open());
+ CHECK(m_fileMisc.is_open());
// set up root node
LoadMisc();
@@ -86,7 +86,7 @@ bool OnDiskWrapper::LoadMisc()
while(m_fileMisc.getline(line, 100000)) {
vector<string> tokens;
Moses::Tokenize(tokens, line);
- assert(tokens.size() == 2);
+ CHECK(tokens.size() == 2);
const string &key = tokens[0];
m_miscInfo[key] = Moses::Scan<UINT64>(tokens[1]);
}
@@ -109,33 +109,33 @@ bool OnDiskWrapper::BeginSave(const std::string &filePath
#endif
m_fileSource.open((filePath + "/Source.dat").c_str(), ios::out | ios::in | ios::binary | ios::ate | ios::trunc);
- assert(m_fileSource.is_open());
+ CHECK(m_fileSource.is_open());
m_fileTargetInd.open((filePath + "/TargetInd.dat").c_str(), ios::out | ios::binary | ios::ate | ios::trunc);
- assert(m_fileTargetInd.is_open());
+ CHECK(m_fileTargetInd.is_open());
m_fileTargetColl.open((filePath + "/TargetColl.dat").c_str(), ios::out | ios::binary | ios::ate | ios::trunc);
- assert(m_fileTargetColl.is_open());
+ CHECK(m_fileTargetColl.is_open());
m_fileVocab.open((filePath + "/Vocab.dat").c_str(), ios::out | ios::ate | ios::trunc);
- assert(m_fileVocab.is_open());
+ CHECK(m_fileVocab.is_open());
m_fileMisc.open((filePath + "/Misc.dat").c_str(), ios::out | ios::ate | ios::trunc);
- assert(m_fileMisc.is_open());
+ CHECK(m_fileMisc.is_open());
// offset by 1. 0 offset is reserved
char c = 0xff;
m_fileSource.write(&c, 1);
- assert(1 == m_fileSource.tellp());
+ CHECK(1 == m_fileSource.tellp());
m_fileTargetInd.write(&c, 1);
- assert(1 == m_fileTargetInd.tellp());
+ CHECK(1 == m_fileTargetInd.tellp());
m_fileTargetColl.write(&c, 1);
- assert(1 == m_fileTargetColl.tellp());
+ CHECK(1 == m_fileTargetColl.tellp());
// set up root node
- assert(GetNumCounts() == 1);
+ CHECK(GetNumCounts() == 1);
vector<float> counts(GetNumCounts());
counts[0] = DEFAULT_COUNT;
m_rootSourceNode = new PhraseNode();
@@ -147,7 +147,7 @@ bool OnDiskWrapper::BeginSave(const std::string &filePath
void OnDiskWrapper::EndSave()
{
bool ret = m_rootSourceNode->Saved();
- assert(ret);
+ CHECK(ret);
GetVocab().Save(*this);
@@ -184,7 +184,7 @@ UINT64 OnDiskWrapper::GetMisc(const std::string &key) const
{
std::map<std::string, UINT64>::const_iterator iter;
iter = m_miscInfo.find(key);
- assert(iter != m_miscInfo.end());
+ CHECK(iter != m_miscInfo.end());
return iter->second;
}
@@ -205,7 +205,7 @@ Word *OnDiskWrapper::ConvertFromMoses(Moses::FactorDirection /* direction */
size_t factorType = factorsVec[ind];
const Moses::Factor *factor = origWord.GetFactor(factorType);
- assert(factor);
+ CHECK(factor);
string str = factor->GetString();
if (isNonTerminal) {
diff --git a/OnDiskPt/src/OnDiskWrapper.h b/OnDiskPt/OnDiskWrapper.h
index 2d3d6ed64..c49afdda1 100644
--- a/OnDiskPt/src/OnDiskWrapper.h
+++ b/OnDiskPt/OnDiskWrapper.h
@@ -22,7 +22,7 @@
#include <fstream>
#include "Vocab.h"
#include "PhraseNode.h"
-#include "../../moses/src/Word.h"
+#include "../moses/src/Word.h"
namespace OnDiskPt
{
diff --git a/OnDiskPt/src/Phrase.cpp b/OnDiskPt/Phrase.cpp
index 1b7fb54e3..b6ccd0721 100644
--- a/OnDiskPt/src/Phrase.cpp
+++ b/OnDiskPt/Phrase.cpp
@@ -18,8 +18,8 @@
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
#include <iostream>
-#include <cassert>
-#include "../../moses/src/Util.h"
+#include "util/check.hh"
+#include "../moses/src/Util.h"
#include "Phrase.h"
using namespace std;
@@ -49,7 +49,7 @@ void Phrase::AddWord(Word *word)
void Phrase::AddWord(Word *word, size_t pos)
{
- assert(pos < m_words.size());
+ CHECK(pos < m_words.size());
m_words.insert(m_words.begin() + pos + 1, word);
}
@@ -73,7 +73,7 @@ int Phrase::Compare(const Phrase &compare) const
}
if (ret == 0) {
- assert(compare.GetSize() >= GetSize());
+ CHECK(compare.GetSize() >= GetSize());
ret = (compare.GetSize() > GetSize()) ? 1 : 0;
}
return ret;
diff --git a/OnDiskPt/src/Phrase.h b/OnDiskPt/Phrase.h
index 093510e64..093510e64 100644
--- a/OnDiskPt/src/Phrase.h
+++ b/OnDiskPt/Phrase.h
diff --git a/OnDiskPt/src/PhraseNode.cpp b/OnDiskPt/PhraseNode.cpp
index 8d3849ee5..98a55dbc1 100644
--- a/OnDiskPt/src/PhraseNode.cpp
+++ b/OnDiskPt/PhraseNode.cpp
@@ -17,12 +17,12 @@
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include "PhraseNode.h"
#include "OnDiskWrapper.h"
#include "TargetPhraseCollection.h"
#include "SourcePhrase.h"
-#include "../../moses/src/Util.h"
+#include "../moses/src/Util.h"
using namespace std;
@@ -55,7 +55,7 @@ PhraseNode::PhraseNode(UINT64 filePos, OnDiskWrapper &onDiskWrapper)
std::fstream &file = onDiskWrapper.GetFileSource();
file.seekg(filePos);
- assert(filePos == file.tellg());
+ CHECK(filePos == file.tellg());
file.read((char*) &m_numChildrenLoad, sizeof(UINT64));
@@ -64,11 +64,11 @@ PhraseNode::PhraseNode(UINT64 filePos, OnDiskWrapper &onDiskWrapper)
// go to start of node again
file.seekg(filePos);
- assert(filePos == file.tellg());
+ CHECK(filePos == file.tellg());
// read everything into memory
file.read(m_memLoad, memAlloc);
- assert(filePos + memAlloc == file.tellg());
+ CHECK(filePos + memAlloc == file.tellg());
// get value
m_value = ((UINT64*)m_memLoad)[1];
@@ -76,7 +76,7 @@ PhraseNode::PhraseNode(UINT64 filePos, OnDiskWrapper &onDiskWrapper)
// get counts
float *memFloat = (float*) (m_memLoad + sizeof(UINT64) * 2);
- assert(countSize == 1);
+ CHECK(countSize == 1);
m_counts[0] = memFloat[0];
m_memLoadLast = m_memLoad + memAlloc;
@@ -85,7 +85,7 @@ PhraseNode::PhraseNode(UINT64 filePos, OnDiskWrapper &onDiskWrapper)
PhraseNode::~PhraseNode()
{
free(m_memLoad);
- //assert(m_saved);
+ //CHECK(m_saved);
}
float PhraseNode::GetCount(size_t ind) const
@@ -95,7 +95,7 @@ float PhraseNode::GetCount(size_t ind) const
void PhraseNode::Save(OnDiskWrapper &onDiskWrapper, size_t pos, size_t tableLimit)
{
- assert(!m_saved);
+ CHECK(!m_saved);
// save this node
m_targetPhraseColl.Sort(tableLimit);
@@ -116,7 +116,7 @@ void PhraseNode::Save(OnDiskWrapper &onDiskWrapper, size_t pos, size_t tableLimi
// count info
float *memFloat = (float*) (mem + memUsed);
- assert(numCounts == 1);
+ CHECK(numCounts == 1);
memFloat[0] = (m_counts.size() == 0) ? DEFAULT_COUNT : m_counts[0]; // if count = 0, put in very large num to make sure its still used. HACK
memUsed += sizeof(float) * numCounts;
@@ -142,7 +142,7 @@ void PhraseNode::Save(OnDiskWrapper &onDiskWrapper, size_t pos, size_t tableLimi
// save this node
//Moses::DebugMem(mem, memAlloc);
- assert(memUsed == memAlloc);
+ CHECK(memUsed == memAlloc);
std::fstream &file = onDiskWrapper.GetFileSource();
m_filePos = file.tellp();
@@ -150,7 +150,7 @@ void PhraseNode::Save(OnDiskWrapper &onDiskWrapper, size_t pos, size_t tableLimi
file.write(mem, memUsed);
UINT64 endPos = file.tellp();
- assert(m_filePos + memUsed == endPos);
+ CHECK(m_filePos + memUsed == endPos);
free(mem);
@@ -234,7 +234,7 @@ void PhraseNode::GetChild(Word &wordFound, UINT64 &childFilePos, size_t ind, OnD
+ childSize * ind;
size_t memRead = ReadChild(wordFound, childFilePos, currMem, numFactors);
- assert(memRead == childSize);
+ CHECK(memRead == childSize);
}
size_t PhraseNode::ReadChild(Word &wordFound, UINT64 &childFilePos, const char *mem, size_t numFactors) const
diff --git a/OnDiskPt/src/PhraseNode.h b/OnDiskPt/PhraseNode.h
index 279ca278a..279ca278a 100644
--- a/OnDiskPt/src/PhraseNode.h
+++ b/OnDiskPt/PhraseNode.h
diff --git a/OnDiskPt/src/SourcePhrase.cpp b/OnDiskPt/SourcePhrase.cpp
index 7c95e5ec4..595748c70 100644
--- a/OnDiskPt/src/SourcePhrase.cpp
+++ b/OnDiskPt/SourcePhrase.cpp
@@ -17,7 +17,7 @@
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include "SourcePhrase.h"
namespace OnDiskPt
diff --git a/OnDiskPt/src/SourcePhrase.h b/OnDiskPt/SourcePhrase.h
index b4ae46705..b4ae46705 100644
--- a/OnDiskPt/src/SourcePhrase.h
+++ b/OnDiskPt/SourcePhrase.h
diff --git a/OnDiskPt/src/TargetPhrase.cpp b/OnDiskPt/TargetPhrase.cpp
index 2b62255e4..b740811d8 100644
--- a/OnDiskPt/src/TargetPhrase.cpp
+++ b/OnDiskPt/TargetPhrase.cpp
@@ -20,10 +20,10 @@
#include <algorithm>
#include <iostream>
-#include "../../moses/src/Util.h"
-#include "../../moses/src/TargetPhrase.h"
-#include "../../moses/src/PhraseDictionary.h"
-#include "../../moses/src/DummyScoreProducers.h"
+#include "../moses/src/Util.h"
+#include "../moses/src/TargetPhrase.h"
+#include "../moses/src/PhraseDictionary.h"
+#include "../moses/src/DummyScoreProducers.h"
#include "TargetPhrase.h"
#include "OnDiskWrapper.h"
@@ -57,13 +57,13 @@ void TargetPhrase::Create1AlignFromString(const std::string &align1Str)
{
vector<size_t> alignPoints;
Moses::Tokenize<size_t>(alignPoints, align1Str, "-");
- assert(alignPoints.size() == 2);
+ CHECK(alignPoints.size() == 2);
m_align.push_back(pair<size_t, size_t>(alignPoints[0], alignPoints[1]) );
}
void TargetPhrase::SetScore(float score, size_t ind)
{
- assert(ind < m_scores.size());
+ CHECK(ind < m_scores.size());
m_scores[ind] = score;
}
@@ -101,7 +101,7 @@ char *TargetPhrase::WriteToMemory(OnDiskWrapper &onDiskWrapper, size_t &memUsed)
memUsed += word.WriteToMemory((char*) currPtr);
}
- assert(memUsed == memNeeded);
+ CHECK(memUsed == memNeeded);
return (char *) mem;
}
@@ -119,7 +119,7 @@ void TargetPhrase::Save(OnDiskWrapper &onDiskWrapper)
file.write(mem, memUsed);
UINT64 endPos = file.tellp();
- assert(startPos + memUsed == endPos);
+ CHECK(startPos + memUsed == endPos);
m_filePos = startPos;
free(mem);
@@ -151,7 +151,7 @@ char *TargetPhrase::WriteOtherInfoToMemory(OnDiskWrapper &onDiskWrapper, size_t
memUsed += WriteScoresToMemory(mem + memUsed);
//DebugMem(mem, memNeeded);
- assert(memNeeded == memUsed);
+ CHECK(memNeeded == memUsed);
return mem;
}
@@ -203,7 +203,7 @@ Moses::TargetPhrase *TargetPhrase::ConvertToMoses(const std::vector<Moses::Facto
// words
size_t phraseSize = GetSize();
- assert(phraseSize > 0); // last word is lhs
+ CHECK(phraseSize > 0); // last word is lhs
--phraseSize;
for (size_t pos = 0; pos < phraseSize; ++pos) {
@@ -232,18 +232,18 @@ Moses::TargetPhrase *TargetPhrase::ConvertToMoses(const std::vector<Moses::Facto
UINT64 TargetPhrase::ReadOtherInfoFromFile(UINT64 filePos, std::fstream &fileTPColl)
{
- assert(filePos == fileTPColl.tellg());
+ CHECK(filePos == fileTPColl.tellg());
UINT64 memUsed = 0;
fileTPColl.read((char*) &m_filePos, sizeof(UINT64));
memUsed += sizeof(UINT64);
- assert(m_filePos != 0);
+ CHECK(m_filePos != 0);
memUsed += ReadAlignFromFile(fileTPColl);
- assert((memUsed + filePos) == fileTPColl.tellg());
+ CHECK((memUsed + filePos) == fileTPColl.tellg());
memUsed += ReadScoresFromFile(fileTPColl);
- assert((memUsed + filePos) == fileTPColl.tellg());
+ CHECK((memUsed + filePos) == fileTPColl.tellg());
return memUsed;
}
@@ -289,7 +289,7 @@ UINT64 TargetPhrase::ReadAlignFromFile(std::fstream &fileTPColl)
UINT64 TargetPhrase::ReadScoresFromFile(std::fstream &fileTPColl)
{
- assert(m_scores.size() > 0);
+ CHECK(m_scores.size() > 0);
UINT64 bytesRead = 0;
diff --git a/OnDiskPt/src/TargetPhrase.h b/OnDiskPt/TargetPhrase.h
index 56c7b6d3f..56c7b6d3f 100644
--- a/OnDiskPt/src/TargetPhrase.h
+++ b/OnDiskPt/TargetPhrase.h
diff --git a/OnDiskPt/src/TargetPhraseCollection.cpp b/OnDiskPt/TargetPhraseCollection.cpp
index 910af9ea9..b57ce4ee3 100644
--- a/OnDiskPt/src/TargetPhraseCollection.cpp
+++ b/OnDiskPt/TargetPhraseCollection.cpp
@@ -20,9 +20,9 @@
#include <algorithm>
#include <iostream>
-#include "../../moses/src/Util.h"
-#include "../../moses/src/TargetPhraseCollection.h"
-#include "../../moses/src/PhraseDictionary.h"
+#include "../moses/src/Util.h"
+#include "../moses/src/TargetPhraseCollection.h"
+#include "../moses/src/PhraseDictionary.h"
#include "TargetPhraseCollection.h"
#include "Vocab.h"
#include "OnDiskWrapper.h"
@@ -107,7 +107,7 @@ void TargetPhraseCollection::Save(OnDiskWrapper &onDiskWrapper)
free(mem);
UINT64 endPos = file.tellp();
- assert(startPos + memUsed == endPos);
+ CHECK(startPos + memUsed == endPos);
m_filePos = startPos;
diff --git a/OnDiskPt/src/TargetPhraseCollection.h b/OnDiskPt/TargetPhraseCollection.h
index 6d95fb356..6d95fb356 100644
--- a/OnDiskPt/src/TargetPhraseCollection.h
+++ b/OnDiskPt/TargetPhraseCollection.h
diff --git a/OnDiskPt/src/Vocab.cpp b/OnDiskPt/Vocab.cpp
index dd641cbfb..86072edc6 100644
--- a/OnDiskPt/src/Vocab.cpp
+++ b/OnDiskPt/Vocab.cpp
@@ -21,7 +21,7 @@
#include <fstream>
#include "OnDiskWrapper.h"
#include "Vocab.h"
-#include "../../moses/src/FactorCollection.h"
+#include "../moses/src/FactorCollection.h"
using namespace std;
@@ -36,7 +36,7 @@ bool Vocab::Load(OnDiskWrapper &onDiskWrapper)
while(getline(file, line)) {
vector<string> tokens;
Moses::Tokenize(tokens, line);
- assert(tokens.size() == 2);
+ CHECK(tokens.size() == 2);
const string &key = tokens[0];
m_vocabColl[key] = Moses::Scan<UINT64>(tokens[1]);
}
diff --git a/OnDiskPt/src/Vocab.h b/OnDiskPt/Vocab.h
index 9bb361251..360aedf4a 100644
--- a/OnDiskPt/src/Vocab.h
+++ b/OnDiskPt/Vocab.h
@@ -20,7 +20,7 @@
***********************************************************************/
#include <string>
#include <map>
-#include "../../moses/src/TypeDef.h"
+#include "../moses/src/TypeDef.h"
namespace Moses
{
diff --git a/OnDiskPt/src/Word.cpp b/OnDiskPt/Word.cpp
index d1aeaf6f9..a8d4c683a 100644
--- a/OnDiskPt/src/Word.cpp
+++ b/OnDiskPt/Word.cpp
@@ -18,8 +18,8 @@
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include "../../moses/src/Util.h"
-#include "../../moses/src/Word.h"
+#include "../moses/src/Util.h"
+#include "../moses/src/Word.h"
#include "Word.h"
using namespace std;
@@ -93,7 +93,7 @@ size_t Word::ReadFromFile(std::fstream &file, size_t numFactors)
file.read(mem, memAlloc);
size_t memUsed = ReadFromMemory(mem, numFactors);
- assert(memAlloc == memUsed);
+ CHECK(memAlloc == memUsed);
free(mem);
return memUsed;
diff --git a/OnDiskPt/src/Word.h b/OnDiskPt/Word.h
index 0c0b55a09..0c0b55a09 100644
--- a/OnDiskPt/src/Word.h
+++ b/OnDiskPt/Word.h
diff --git a/OnDiskPt/src/Makefile.am b/OnDiskPt/src/Makefile.am
deleted file mode 100644
index 7070e372a..000000000
--- a/OnDiskPt/src/Makefile.am
+++ /dev/null
@@ -1,14 +0,0 @@
-lib_LIBRARIES = libOnDiskPt.a
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES $(BOOST_CPPFLAGS)
-libOnDiskPt_a_SOURCES = \
- OnDiskWrapper.cpp \
- SourcePhrase.cpp \
- TargetPhrase.cpp \
- Word.cpp \
- Phrase.cpp \
- PhraseNode.cpp \
- TargetPhraseCollection.cpp \
- Vocab.cpp
-
-
-
diff --git a/bjam b/bjam
new file mode 100755
index 000000000..962d5a87a
--- /dev/null
+++ b/bjam
@@ -0,0 +1,22 @@
+#!/bin/bash
+set -e
+if
+ which bjam >/dev/null 2>/dev/null && #Have a bjam in path
+ ! grep UFIHGUFIHBDJKNCFZXAEVA "$(which bjam)" >/dev/null && #bjam in path isn't this script
+ bjam --help >/dev/null 2>/dev/null && #bjam in path isn't broken (i.e. has boost-build)
+ bjam --version |grep "Boost.Build 201" >/dev/null 2>/dev/null #It's recent enough.
+then
+ #Delegate to system bjam
+ exec bjam "$@"
+fi
+
+top="$(dirname "$0")"
+if [ ! -x "$top"/jam-files/bjam ]; then
+ pushd "$top/jam-files/engine"
+ ./build.sh
+ cp -f bin.*/bjam ../bjam
+ popd
+fi
+
+export BOOST_BUILD_PATH="$top"/jam-files/boost-build
+exec "$top"/jam-files/bjam "$@"
diff --git a/config.guess b/config.guess
deleted file mode 100755
index 917bbc50f..000000000
--- a/config.guess
+++ /dev/null
@@ -1,1463 +0,0 @@
-#! /bin/sh
-# Attempt to guess a canonical system name.
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-# 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
-
-timestamp='2005-07-08'
-
-# This file is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
-# 02110-1301, USA.
-#
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-
-# Originally written by Per Bothner <per@bothner.com>.
-# Please send patches to <config-patches@gnu.org>. Submit a context
-# diff and a properly formatted ChangeLog entry.
-#
-# This script attempts to guess a canonical system name similar to
-# config.sub. If it succeeds, it prints the system name on stdout, and
-# exits with 0. Otherwise, it exits with 1.
-#
-# The plan is that this can be called by configure scripts if you
-# don't specify an explicit build system type.
-
-me=`echo "$0" | sed -e 's,.*/,,'`
-
-usage="\
-Usage: $0 [OPTION]
-
-Output the configuration name of the system \`$me' is run on.
-
-Operation modes:
- -h, --help print this help, then exit
- -t, --time-stamp print date of last modification, then exit
- -v, --version print version number, then exit
-
-Report bugs and patches to <config-patches@gnu.org>."
-
-version="\
-GNU config.guess ($timestamp)
-
-Originally written by Per Bothner.
-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
-Free Software Foundation, Inc.
-
-This is free software; see the source for copying conditions. There is NO
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
-
-help="
-Try \`$me --help' for more information."
-
-# Parse command line
-while test $# -gt 0 ; do
- case $1 in
- --time-stamp | --time* | -t )
- echo "$timestamp" ; exit ;;
- --version | -v )
- echo "$version" ; exit ;;
- --help | --h* | -h )
- echo "$usage"; exit ;;
- -- ) # Stop option processing
- shift; break ;;
- - ) # Use stdin as input.
- break ;;
- -* )
- echo "$me: invalid option $1$help" >&2
- exit 1 ;;
- * )
- break ;;
- esac
-done
-
-if test $# != 0; then
- echo "$me: too many arguments$help" >&2
- exit 1
-fi
-
-trap 'exit 1' 1 2 15
-
-# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
-# compiler to aid in system detection is discouraged as it requires
-# temporary files to be created and, as you can see below, it is a
-# headache to deal with in a portable fashion.
-
-# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
-# use `HOST_CC' if defined, but it is deprecated.
-
-# Portable tmp directory creation inspired by the Autoconf team.
-
-set_cc_for_build='
-trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
-trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
-: ${TMPDIR=/tmp} ;
- { tmp=`(umask 077 && mktemp -d -q "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
- { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
- { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
- { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
-dummy=$tmp/dummy ;
-tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
-case $CC_FOR_BUILD,$HOST_CC,$CC in
- ,,) echo "int x;" > $dummy.c ;
- for c in cc gcc c89 c99 ; do
- if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
- CC_FOR_BUILD="$c"; break ;
- fi ;
- done ;
- if test x"$CC_FOR_BUILD" = x ; then
- CC_FOR_BUILD=no_compiler_found ;
- fi
- ;;
- ,,*) CC_FOR_BUILD=$CC ;;
- ,*,*) CC_FOR_BUILD=$HOST_CC ;;
-esac ; set_cc_for_build= ;'
-
-# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
-# (ghazi@noc.rutgers.edu 1994-08-24)
-if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
- PATH=$PATH:/.attbin ; export PATH
-fi
-
-UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
-UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
-UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
-UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
-
-# Note: order is significant - the case branches are not exclusive.
-
-case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
- *:NetBSD:*:*)
- # NetBSD (nbsd) targets should (where applicable) match one or
- # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*,
- # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
- # switched to ELF, *-*-netbsd* would select the old
- # object file format. This provides both forward
- # compatibility and a consistent mechanism for selecting the
- # object file format.
- #
- # Note: NetBSD doesn't particularly care about the vendor
- # portion of the name. We always set it to "unknown".
- sysctl="sysctl -n hw.machine_arch"
- UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
- /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
- case "${UNAME_MACHINE_ARCH}" in
- armeb) machine=armeb-unknown ;;
- arm*) machine=arm-unknown ;;
- sh3el) machine=shl-unknown ;;
- sh3eb) machine=sh-unknown ;;
- *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
- esac
- # The Operating System including object format, if it has switched
- # to ELF recently, or will in the future.
- case "${UNAME_MACHINE_ARCH}" in
- arm*|i386|m68k|ns32k|sh3*|sparc|vax)
- eval $set_cc_for_build
- if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
- | grep __ELF__ >/dev/null
- then
- # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
- # Return netbsd for either. FIX?
- os=netbsd
- else
- os=netbsdelf
- fi
- ;;
- *)
- os=netbsd
- ;;
- esac
- # The OS release
- # Debian GNU/NetBSD machines have a different userland, and
- # thus, need a distinct triplet. However, they do not need
- # kernel version information, so it can be replaced with a
- # suitable tag, in the style of linux-gnu.
- case "${UNAME_VERSION}" in
- Debian*)
- release='-gnu'
- ;;
- *)
- release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
- ;;
- esac
- # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
- # contains redundant information, the shorter form:
- # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
- echo "${machine}-${os}${release}"
- exit ;;
- *:OpenBSD:*:*)
- UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
- echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
- exit ;;
- *:ekkoBSD:*:*)
- echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
- exit ;;
- macppc:MirBSD:*:*)
- echo powerppc-unknown-mirbsd${UNAME_RELEASE}
- exit ;;
- *:MirBSD:*:*)
- echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
- exit ;;
- alpha:OSF1:*:*)
- case $UNAME_RELEASE in
- *4.0)
- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
- ;;
- *5.*)
- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
- ;;
- esac
- # According to Compaq, /usr/sbin/psrinfo has been available on
- # OSF/1 and Tru64 systems produced since 1995. I hope that
- # covers most systems running today. This code pipes the CPU
- # types through head -n 1, so we only detect the type of CPU 0.
- ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
- case "$ALPHA_CPU_TYPE" in
- "EV4 (21064)")
- UNAME_MACHINE="alpha" ;;
- "EV4.5 (21064)")
- UNAME_MACHINE="alpha" ;;
- "LCA4 (21066/21068)")
- UNAME_MACHINE="alpha" ;;
- "EV5 (21164)")
- UNAME_MACHINE="alphaev5" ;;
- "EV5.6 (21164A)")
- UNAME_MACHINE="alphaev56" ;;
- "EV5.6 (21164PC)")
- UNAME_MACHINE="alphapca56" ;;
- "EV5.7 (21164PC)")
- UNAME_MACHINE="alphapca57" ;;
- "EV6 (21264)")
- UNAME_MACHINE="alphaev6" ;;
- "EV6.7 (21264A)")
- UNAME_MACHINE="alphaev67" ;;
- "EV6.8CB (21264C)")
- UNAME_MACHINE="alphaev68" ;;
- "EV6.8AL (21264B)")
- UNAME_MACHINE="alphaev68" ;;
- "EV6.8CX (21264D)")
- UNAME_MACHINE="alphaev68" ;;
- "EV6.9A (21264/EV69A)")
- UNAME_MACHINE="alphaev69" ;;
- "EV7 (21364)")
- UNAME_MACHINE="alphaev7" ;;
- "EV7.9 (21364A)")
- UNAME_MACHINE="alphaev79" ;;
- esac
- # A Pn.n version is a patched version.
- # A Vn.n version is a released version.
- # A Tn.n version is a released field test version.
- # A Xn.n version is an unreleased experimental baselevel.
- # 1.2 uses "1.2" for uname -r.
- echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
- exit ;;
- Alpha\ *:Windows_NT*:*)
- # How do we know it's Interix rather than the generic POSIX subsystem?
- # Should we change UNAME_MACHINE based on the output of uname instead
- # of the specific Alpha model?
- echo alpha-pc-interix
- exit ;;
- 21064:Windows_NT:50:3)
- echo alpha-dec-winnt3.5
- exit ;;
- Amiga*:UNIX_System_V:4.0:*)
- echo m68k-unknown-sysv4
- exit ;;
- *:[Aa]miga[Oo][Ss]:*:*)
- echo ${UNAME_MACHINE}-unknown-amigaos
- exit ;;
- *:[Mm]orph[Oo][Ss]:*:*)
- echo ${UNAME_MACHINE}-unknown-morphos
- exit ;;
- *:OS/390:*:*)
- echo i370-ibm-openedition
- exit ;;
- *:z/VM:*:*)
- echo s390-ibm-zvmoe
- exit ;;
- *:OS400:*:*)
- echo powerpc-ibm-os400
- exit ;;
- arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
- echo arm-acorn-riscix${UNAME_RELEASE}
- exit ;;
- arm:riscos:*:*|arm:RISCOS:*:*)
- echo arm-unknown-riscos
- exit ;;
- SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
- echo hppa1.1-hitachi-hiuxmpp
- exit ;;
- Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
- # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
- if test "`(/bin/universe) 2>/dev/null`" = att ; then
- echo pyramid-pyramid-sysv3
- else
- echo pyramid-pyramid-bsd
- fi
- exit ;;
- NILE*:*:*:dcosx)
- echo pyramid-pyramid-svr4
- exit ;;
- DRS?6000:unix:4.0:6*)
- echo sparc-icl-nx6
- exit ;;
- DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
- case `/usr/bin/uname -p` in
- sparc) echo sparc-icl-nx7; exit ;;
- esac ;;
- sun4H:SunOS:5.*:*)
- echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
- echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- i86pc:SunOS:5.*:*)
- echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4*:SunOS:6*:*)
- # According to config.sub, this is the proper way to canonicalize
- # SunOS6. Hard to guess exactly what SunOS6 will be like, but
- # it's likely to be more like Solaris than SunOS4.
- echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4*:SunOS:*:*)
- case "`/usr/bin/arch -k`" in
- Series*|S4*)
- UNAME_RELEASE=`uname -v`
- ;;
- esac
- # Japanese Language versions have a version number like `4.1.3-JL'.
- echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
- exit ;;
- sun3*:SunOS:*:*)
- echo m68k-sun-sunos${UNAME_RELEASE}
- exit ;;
- sun*:*:4.2BSD:*)
- UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
- test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
- case "`/bin/arch`" in
- sun3)
- echo m68k-sun-sunos${UNAME_RELEASE}
- ;;
- sun4)
- echo sparc-sun-sunos${UNAME_RELEASE}
- ;;
- esac
- exit ;;
- aushp:SunOS:*:*)
- echo sparc-auspex-sunos${UNAME_RELEASE}
- exit ;;
- # The situation for MiNT is a little confusing. The machine name
- # can be virtually everything (everything which is not
- # "atarist" or "atariste" at least should have a processor
- # > m68000). The system name ranges from "MiNT" over "FreeMiNT"
- # to the lowercase version "mint" (or "freemint"). Finally
- # the system name "TOS" denotes a system which is actually not
- # MiNT. But MiNT is downward compatible to TOS, so this should
- # be no problem.
- atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
- echo m68k-atari-mint${UNAME_RELEASE}
- exit ;;
- atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
- echo m68k-atari-mint${UNAME_RELEASE}
- exit ;;
- *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
- echo m68k-atari-mint${UNAME_RELEASE}
- exit ;;
- milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
- echo m68k-milan-mint${UNAME_RELEASE}
- exit ;;
- hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
- echo m68k-hades-mint${UNAME_RELEASE}
- exit ;;
- *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
- echo m68k-unknown-mint${UNAME_RELEASE}
- exit ;;
- m68k:machten:*:*)
- echo m68k-apple-machten${UNAME_RELEASE}
- exit ;;
- powerpc:machten:*:*)
- echo powerpc-apple-machten${UNAME_RELEASE}
- exit ;;
- RISC*:Mach:*:*)
- echo mips-dec-mach_bsd4.3
- exit ;;
- RISC*:ULTRIX:*:*)
- echo mips-dec-ultrix${UNAME_RELEASE}
- exit ;;
- VAX*:ULTRIX*:*:*)
- echo vax-dec-ultrix${UNAME_RELEASE}
- exit ;;
- 2020:CLIX:*:* | 2430:CLIX:*:*)
- echo clipper-intergraph-clix${UNAME_RELEASE}
- exit ;;
- mips:*:*:UMIPS | mips:*:*:RISCos)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
-#ifdef __cplusplus
-#include <stdio.h> /* for printf() prototype */
- int main (int argc, char *argv[]) {
-#else
- int main (argc, argv) int argc; char *argv[]; {
-#endif
- #if defined (host_mips) && defined (MIPSEB)
- #if defined (SYSTYPE_SYSV)
- printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
- #endif
- #if defined (SYSTYPE_SVR4)
- printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
- #endif
- #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
- printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
- #endif
- #endif
- exit (-1);
- }
-EOF
- $CC_FOR_BUILD -o $dummy $dummy.c &&
- dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
- SYSTEM_NAME=`$dummy $dummyarg` &&
- { echo "$SYSTEM_NAME"; exit; }
- echo mips-mips-riscos${UNAME_RELEASE}
- exit ;;
- Motorola:PowerMAX_OS:*:*)
- echo powerpc-motorola-powermax
- exit ;;
- Motorola:*:4.3:PL8-*)
- echo powerpc-harris-powermax
- exit ;;
- Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
- echo powerpc-harris-powermax
- exit ;;
- Night_Hawk:Power_UNIX:*:*)
- echo powerpc-harris-powerunix
- exit ;;
- m88k:CX/UX:7*:*)
- echo m88k-harris-cxux7
- exit ;;
- m88k:*:4*:R4*)
- echo m88k-motorola-sysv4
- exit ;;
- m88k:*:3*:R3*)
- echo m88k-motorola-sysv3
- exit ;;
- AViiON:dgux:*:*)
- # DG/UX returns AViiON for all architectures
- UNAME_PROCESSOR=`/usr/bin/uname -p`
- if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
- then
- if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
- [ ${TARGET_BINARY_INTERFACE}x = x ]
- then
- echo m88k-dg-dgux${UNAME_RELEASE}
- else
- echo m88k-dg-dguxbcs${UNAME_RELEASE}
- fi
- else
- echo i586-dg-dgux${UNAME_RELEASE}
- fi
- exit ;;
- M88*:DolphinOS:*:*) # DolphinOS (SVR3)
- echo m88k-dolphin-sysv3
- exit ;;
- M88*:*:R3*:*)
- # Delta 88k system running SVR3
- echo m88k-motorola-sysv3
- exit ;;
- XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
- echo m88k-tektronix-sysv3
- exit ;;
- Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
- echo m68k-tektronix-bsd
- exit ;;
- *:IRIX*:*:*)
- echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
- exit ;;
- ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
- echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id
- exit ;; # Note that: echo "'`uname -s`'" gives 'AIX '
- i*86:AIX:*:*)
- echo i386-ibm-aix
- exit ;;
- ia64:AIX:*:*)
- if [ -x /usr/bin/oslevel ] ; then
- IBM_REV=`/usr/bin/oslevel`
- else
- IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
- fi
- echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
- exit ;;
- *:AIX:2:3)
- if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #include <sys/systemcfg.h>
-
- main()
- {
- if (!__power_pc())
- exit(1);
- puts("powerpc-ibm-aix3.2.5");
- exit(0);
- }
-EOF
- if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
- then
- echo "$SYSTEM_NAME"
- else
- echo rs6000-ibm-aix3.2.5
- fi
- elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
- echo rs6000-ibm-aix3.2.4
- else
- echo rs6000-ibm-aix3.2
- fi
- exit ;;
- *:AIX:*:[45])
- IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
- if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
- IBM_ARCH=rs6000
- else
- IBM_ARCH=powerpc
- fi
- if [ -x /usr/bin/oslevel ] ; then
- IBM_REV=`/usr/bin/oslevel`
- else
- IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
- fi
- echo ${IBM_ARCH}-ibm-aix${IBM_REV}
- exit ;;
- *:AIX:*:*)
- echo rs6000-ibm-aix
- exit ;;
- ibmrt:4.4BSD:*|romp-ibm:BSD:*)
- echo romp-ibm-bsd4.4
- exit ;;
- ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
- echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to
- exit ;; # report: romp-ibm BSD 4.3
- *:BOSX:*:*)
- echo rs6000-bull-bosx
- exit ;;
- DPX/2?00:B.O.S.:*:*)
- echo m68k-bull-sysv3
- exit ;;
- 9000/[34]??:4.3bsd:1.*:*)
- echo m68k-hp-bsd
- exit ;;
- hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
- echo m68k-hp-bsd4.4
- exit ;;
- 9000/[34678]??:HP-UX:*:*)
- HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
- case "${UNAME_MACHINE}" in
- 9000/31? ) HP_ARCH=m68000 ;;
- 9000/[34]?? ) HP_ARCH=m68k ;;
- 9000/[678][0-9][0-9])
- if [ -x /usr/bin/getconf ]; then
- sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
- sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
- case "${sc_cpu_version}" in
- 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
- 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
- 532) # CPU_PA_RISC2_0
- case "${sc_kernel_bits}" in
- 32) HP_ARCH="hppa2.0n" ;;
- 64) HP_ARCH="hppa2.0w" ;;
- '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20
- esac ;;
- esac
- fi
- if [ "${HP_ARCH}" = "" ]; then
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
-
- #define _HPUX_SOURCE
- #include <stdlib.h>
- #include <unistd.h>
-
- int main ()
- {
- #if defined(_SC_KERNEL_BITS)
- long bits = sysconf(_SC_KERNEL_BITS);
- #endif
- long cpu = sysconf (_SC_CPU_VERSION);
-
- switch (cpu)
- {
- case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
- case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
- case CPU_PA_RISC2_0:
- #if defined(_SC_KERNEL_BITS)
- switch (bits)
- {
- case 64: puts ("hppa2.0w"); break;
- case 32: puts ("hppa2.0n"); break;
- default: puts ("hppa2.0"); break;
- } break;
- #else /* !defined(_SC_KERNEL_BITS) */
- puts ("hppa2.0"); break;
- #endif
- default: puts ("hppa1.0"); break;
- }
- exit (0);
- }
-EOF
- (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
- test -z "$HP_ARCH" && HP_ARCH=hppa
- fi ;;
- esac
- if [ ${HP_ARCH} = "hppa2.0w" ]
- then
- eval $set_cc_for_build
-
- # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
- # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler
- # generating 64-bit code. GNU and HP use different nomenclature:
- #
- # $ CC_FOR_BUILD=cc ./config.guess
- # => hppa2.0w-hp-hpux11.23
- # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
- # => hppa64-hp-hpux11.23
-
- if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
- grep __LP64__ >/dev/null
- then
- HP_ARCH="hppa2.0w"
- else
- HP_ARCH="hppa64"
- fi
- fi
- echo ${HP_ARCH}-hp-hpux${HPUX_REV}
- exit ;;
- ia64:HP-UX:*:*)
- HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
- echo ia64-hp-hpux${HPUX_REV}
- exit ;;
- 3050*:HI-UX:*:*)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #include <unistd.h>
- int
- main ()
- {
- long cpu = sysconf (_SC_CPU_VERSION);
- /* The order matters, because CPU_IS_HP_MC68K erroneously returns
- true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct
- results, however. */
- if (CPU_IS_PA_RISC (cpu))
- {
- switch (cpu)
- {
- case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
- case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
- case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
- default: puts ("hppa-hitachi-hiuxwe2"); break;
- }
- }
- else if (CPU_IS_HP_MC68K (cpu))
- puts ("m68k-hitachi-hiuxwe2");
- else puts ("unknown-hitachi-hiuxwe2");
- exit (0);
- }
-EOF
- $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
- { echo "$SYSTEM_NAME"; exit; }
- echo unknown-hitachi-hiuxwe2
- exit ;;
- 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
- echo hppa1.1-hp-bsd
- exit ;;
- 9000/8??:4.3bsd:*:*)
- echo hppa1.0-hp-bsd
- exit ;;
- *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
- echo hppa1.0-hp-mpeix
- exit ;;
- hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
- echo hppa1.1-hp-osf
- exit ;;
- hp8??:OSF1:*:*)
- echo hppa1.0-hp-osf
- exit ;;
- i*86:OSF1:*:*)
- if [ -x /usr/sbin/sysversion ] ; then
- echo ${UNAME_MACHINE}-unknown-osf1mk
- else
- echo ${UNAME_MACHINE}-unknown-osf1
- fi
- exit ;;
- parisc*:Lites*:*:*)
- echo hppa1.1-hp-lites
- exit ;;
- C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
- echo c1-convex-bsd
- exit ;;
- C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
- if getsysinfo -f scalar_acc
- then echo c32-convex-bsd
- else echo c2-convex-bsd
- fi
- exit ;;
- C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
- echo c34-convex-bsd
- exit ;;
- C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
- echo c38-convex-bsd
- exit ;;
- C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
- echo c4-convex-bsd
- exit ;;
- CRAY*Y-MP:*:*:*)
- echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*[A-Z]90:*:*:*)
- echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
- | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
- -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
- -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*TS:*:*:*)
- echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*T3E:*:*:*)
- echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*SV1:*:*:*)
- echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- *:UNICOS/mp:*:*)
- echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
- FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
- FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
- echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
- exit ;;
- 5000:UNIX_System_V:4.*:*)
- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
- FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
- echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
- exit ;;
- i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
- echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
- exit ;;
- sparc*:BSD/OS:*:*)
- echo sparc-unknown-bsdi${UNAME_RELEASE}
- exit ;;
- *:BSD/OS:*:*)
- echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
- exit ;;
- *:FreeBSD:*:*)
- echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
- exit ;;
- i*:CYGWIN*:*)
- echo ${UNAME_MACHINE}-pc-cygwin
- exit ;;
- i*:MINGW*:*)
- echo ${UNAME_MACHINE}-pc-mingw32
- exit ;;
- i*:windows32*:*)
- # uname -m includes "-pc" on this system.
- echo ${UNAME_MACHINE}-mingw32
- exit ;;
- i*:PW*:*)
- echo ${UNAME_MACHINE}-pc-pw32
- exit ;;
- x86:Interix*:[34]*)
- echo i586-pc-interix${UNAME_RELEASE}|sed -e 's/\..*//'
- exit ;;
- [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
- echo i${UNAME_MACHINE}-pc-mks
- exit ;;
- i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
- # How do we know it's Interix rather than the generic POSIX subsystem?
- # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
-	# set UNAME_MACHINE based on the output of uname instead of i386?
- echo i586-pc-interix
- exit ;;
- i*:UWIN*:*)
- echo ${UNAME_MACHINE}-pc-uwin
- exit ;;
- amd64:CYGWIN*:*:*)
- echo x86_64-unknown-cygwin
- exit ;;
- p*:CYGWIN*:*)
- echo powerpcle-unknown-cygwin
- exit ;;
- prep*:SunOS:5.*:*)
- echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- *:GNU:*:*)
- # the GNU system
- echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
- exit ;;
- *:GNU/*:*:*)
- # other systems with GNU libc and userland
- echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
- exit ;;
- i*86:Minix:*:*)
- echo ${UNAME_MACHINE}-pc-minix
- exit ;;
- arm*:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- cris:Linux:*:*)
- echo cris-axis-linux-gnu
- exit ;;
- crisv32:Linux:*:*)
- echo crisv32-axis-linux-gnu
- exit ;;
- frv:Linux:*:*)
- echo frv-unknown-linux-gnu
- exit ;;
- ia64:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- m32r*:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- m68*:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- mips:Linux:*:*)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #undef CPU
- #undef mips
- #undef mipsel
- #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
- CPU=mipsel
- #else
- #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
- CPU=mips
- #else
- CPU=
- #endif
- #endif
-EOF
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^CPU=`
- test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
- ;;
- mips64:Linux:*:*)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #undef CPU
- #undef mips64
- #undef mips64el
- #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
- CPU=mips64el
- #else
- #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
- CPU=mips64
- #else
- CPU=
- #endif
- #endif
-EOF
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^CPU=`
- test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
- ;;
- ppc:Linux:*:*)
- echo powerpc-unknown-linux-gnu
- exit ;;
- ppc64:Linux:*:*)
- echo powerpc64-unknown-linux-gnu
- exit ;;
- alpha:Linux:*:*)
- case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
- EV5) UNAME_MACHINE=alphaev5 ;;
- EV56) UNAME_MACHINE=alphaev56 ;;
- PCA56) UNAME_MACHINE=alphapca56 ;;
- PCA57) UNAME_MACHINE=alphapca56 ;;
- EV6) UNAME_MACHINE=alphaev6 ;;
- EV67) UNAME_MACHINE=alphaev67 ;;
- EV68*) UNAME_MACHINE=alphaev68 ;;
- esac
- objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null
- if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
- echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
- exit ;;
- parisc:Linux:*:* | hppa:Linux:*:*)
- # Look for CPU level
- case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
- PA7*) echo hppa1.1-unknown-linux-gnu ;;
- PA8*) echo hppa2.0-unknown-linux-gnu ;;
- *) echo hppa-unknown-linux-gnu ;;
- esac
- exit ;;
- parisc64:Linux:*:* | hppa64:Linux:*:*)
- echo hppa64-unknown-linux-gnu
- exit ;;
- s390:Linux:*:* | s390x:Linux:*:*)
- echo ${UNAME_MACHINE}-ibm-linux
- exit ;;
- sh64*:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- sh*:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- sparc:Linux:*:* | sparc64:Linux:*:*)
- echo ${UNAME_MACHINE}-unknown-linux-gnu
- exit ;;
- x86_64:Linux:*:*)
- echo x86_64-unknown-linux-gnu
- exit ;;
- i*86:Linux:*:*)
- # The BFD linker knows what the default object file format is, so
- # first see if it will tell us. cd to the root directory to prevent
- # problems with other programs or directories called `ld' in the path.
- # Set LC_ALL=C to ensure ld outputs messages in English.
- ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \
- | sed -ne '/supported targets:/!d
- s/[ ][ ]*/ /g
- s/.*supported targets: *//
- s/ .*//
- p'`
- case "$ld_supported_targets" in
- elf32-i386)
- TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu"
- ;;
- a.out-i386-linux)
- echo "${UNAME_MACHINE}-pc-linux-gnuaout"
- exit ;;
- coff-i386)
- echo "${UNAME_MACHINE}-pc-linux-gnucoff"
- exit ;;
- "")
- # Either a pre-BFD a.out linker (linux-gnuoldld) or
- # one that does not give us useful --help.
- echo "${UNAME_MACHINE}-pc-linux-gnuoldld"
- exit ;;
- esac
- # Determine whether the default compiler is a.out or elf
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #include <features.h>
- #ifdef __ELF__
- # ifdef __GLIBC__
- # if __GLIBC__ >= 2
- LIBC=gnu
- # else
- LIBC=gnulibc1
- # endif
- # else
- LIBC=gnulibc1
- # endif
- #else
- #ifdef __INTEL_COMPILER
- LIBC=gnu
- #else
- LIBC=gnuaout
- #endif
- #endif
- #ifdef __dietlibc__
- LIBC=dietlibc
- #endif
-EOF
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^LIBC=`
- test x"${LIBC}" != x && {
- echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
- exit
- }
- test x"${TENTATIVE}" != x && { echo "${TENTATIVE}"; exit; }
- ;;
- i*86:DYNIX/ptx:4*:*)
- # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
- # earlier versions are messed up and put the nodename in both
- # sysname and nodename.
- echo i386-sequent-sysv4
- exit ;;
- i*86:UNIX_SV:4.2MP:2.*)
- # Unixware is an offshoot of SVR4, but it has its own version
- # number series starting with 2...
- # I am not positive that other SVR4 systems won't match this,
- # I just have to hope. -- rms.
- # Use sysv4.2uw... so that sysv4* matches it.
- echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
- exit ;;
- i*86:OS/2:*:*)
- # If we were able to find `uname', then EMX Unix compatibility
- # is probably installed.
- echo ${UNAME_MACHINE}-pc-os2-emx
- exit ;;
- i*86:XTS-300:*:STOP)
- echo ${UNAME_MACHINE}-unknown-stop
- exit ;;
- i*86:atheos:*:*)
- echo ${UNAME_MACHINE}-unknown-atheos
- exit ;;
- i*86:syllable:*:*)
- echo ${UNAME_MACHINE}-pc-syllable
- exit ;;
- i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*)
- echo i386-unknown-lynxos${UNAME_RELEASE}
- exit ;;
- i*86:*DOS:*:*)
- echo ${UNAME_MACHINE}-pc-msdosdjgpp
- exit ;;
- i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
- UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
- if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
- echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
- else
- echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
- fi
- exit ;;
- i*86:*:5:[678]*)
- # UnixWare 7.x, OpenUNIX and OpenServer 6.
- case `/bin/uname -X | grep "^Machine"` in
- *486*) UNAME_MACHINE=i486 ;;
- *Pentium) UNAME_MACHINE=i586 ;;
- *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
- esac
- echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
- exit ;;
- i*86:*:3.2:*)
- if test -f /usr/options/cb.name; then
- UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
- echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
- elif /bin/uname -X 2>/dev/null >/dev/null ; then
- UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
- (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
- (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
- && UNAME_MACHINE=i586
- (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
- && UNAME_MACHINE=i686
- (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
- && UNAME_MACHINE=i686
- echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
- else
- echo ${UNAME_MACHINE}-pc-sysv32
- fi
- exit ;;
- pc:*:*:*)
- # Left here for compatibility:
- # uname -m prints for DJGPP always 'pc', but it prints nothing about
- # the processor, so we play safe by assuming i386.
- echo i386-pc-msdosdjgpp
- exit ;;
- Intel:Mach:3*:*)
- echo i386-pc-mach3
- exit ;;
- paragon:*:*:*)
- echo i860-intel-osf1
- exit ;;
- i860:*:4.*:*) # i860-SVR4
- if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
- echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
- else # Add other i860-SVR4 vendors below as they are discovered.
- echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4
- fi
- exit ;;
- mini*:CTIX:SYS*5:*)
- # "miniframe"
- echo m68010-convergent-sysv
- exit ;;
- mc68k:UNIX:SYSTEM5:3.51m)
- echo m68k-convergent-sysv
- exit ;;
- M680?0:D-NIX:5.3:*)
- echo m68k-diab-dnix
- exit ;;
- M68*:*:R3V[5678]*:*)
- test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
- 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
- OS_REL=''
- test -r /etc/.relid \
- && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
- /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
- && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
- /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
- && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
- 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
- /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
- && { echo i486-ncr-sysv4; exit; } ;;
- m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
- echo m68k-unknown-lynxos${UNAME_RELEASE}
- exit ;;
- mc68030:UNIX_System_V:4.*:*)
- echo m68k-atari-sysv4
- exit ;;
- TSUNAMI:LynxOS:2.*:*)
- echo sparc-unknown-lynxos${UNAME_RELEASE}
- exit ;;
- rs6000:LynxOS:2.*:*)
- echo rs6000-unknown-lynxos${UNAME_RELEASE}
- exit ;;
- PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*)
- echo powerpc-unknown-lynxos${UNAME_RELEASE}
- exit ;;
- SM[BE]S:UNIX_SV:*:*)
- echo mips-dde-sysv${UNAME_RELEASE}
- exit ;;
- RM*:ReliantUNIX-*:*:*)
- echo mips-sni-sysv4
- exit ;;
- RM*:SINIX-*:*:*)
- echo mips-sni-sysv4
- exit ;;
- *:SINIX-*:*:*)
- if uname -p 2>/dev/null >/dev/null ; then
- UNAME_MACHINE=`(uname -p) 2>/dev/null`
- echo ${UNAME_MACHINE}-sni-sysv4
- else
- echo ns32k-sni-sysv
- fi
- exit ;;
- PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
- # says <Richard.M.Bartel@ccMail.Census.GOV>
- echo i586-unisys-sysv4
- exit ;;
- *:UNIX_System_V:4*:FTX*)
- # From Gerald Hewes <hewes@openmarket.com>.
- # How about differentiating between stratus architectures? -djm
- echo hppa1.1-stratus-sysv4
- exit ;;
- *:*:*:FTX*)
- # From seanf@swdc.stratus.com.
- echo i860-stratus-sysv4
- exit ;;
- i*86:VOS:*:*)
- # From Paul.Green@stratus.com.
- echo ${UNAME_MACHINE}-stratus-vos
- exit ;;
- *:VOS:*:*)
- # From Paul.Green@stratus.com.
- echo hppa1.1-stratus-vos
- exit ;;
- mc68*:A/UX:*:*)
- echo m68k-apple-aux${UNAME_RELEASE}
- exit ;;
- news*:NEWS-OS:6*:*)
- echo mips-sony-newsos6
- exit ;;
- R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
- if [ -d /usr/nec ]; then
- echo mips-nec-sysv${UNAME_RELEASE}
- else
- echo mips-unknown-sysv${UNAME_RELEASE}
- fi
- exit ;;
- BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only.
- echo powerpc-be-beos
- exit ;;
- BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only.
- echo powerpc-apple-beos
- exit ;;
- BePC:BeOS:*:*) # BeOS running on Intel PC compatible.
- echo i586-pc-beos
- exit ;;
- SX-4:SUPER-UX:*:*)
- echo sx4-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-5:SUPER-UX:*:*)
- echo sx5-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-6:SUPER-UX:*:*)
- echo sx6-nec-superux${UNAME_RELEASE}
- exit ;;
- Power*:Rhapsody:*:*)
- echo powerpc-apple-rhapsody${UNAME_RELEASE}
- exit ;;
- *:Rhapsody:*:*)
- echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
- exit ;;
- *:Darwin:*:*)
- UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
- case $UNAME_PROCESSOR in
- *86) UNAME_PROCESSOR=i686 ;;
- unknown) UNAME_PROCESSOR=powerpc ;;
- esac
- echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
- exit ;;
- *:procnto*:*:* | *:QNX:[0123456789]*:*)
- UNAME_PROCESSOR=`uname -p`
- if test "$UNAME_PROCESSOR" = "x86"; then
- UNAME_PROCESSOR=i386
- UNAME_MACHINE=pc
- fi
- echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
- exit ;;
- *:QNX:*:4*)
- echo i386-pc-qnx
- exit ;;
- NSE-?:NONSTOP_KERNEL:*:*)
- echo nse-tandem-nsk${UNAME_RELEASE}
- exit ;;
- NSR-?:NONSTOP_KERNEL:*:*)
- echo nsr-tandem-nsk${UNAME_RELEASE}
- exit ;;
- *:NonStop-UX:*:*)
- echo mips-compaq-nonstopux
- exit ;;
- BS2000:POSIX*:*:*)
- echo bs2000-siemens-sysv
- exit ;;
- DS/*:UNIX_System_V:*:*)
- echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
- exit ;;
- *:Plan9:*:*)
- # "uname -m" is not consistent, so use $cputype instead. 386
- # is converted to i386 for consistency with other x86
- # operating systems.
- if test "$cputype" = "386"; then
- UNAME_MACHINE=i386
- else
- UNAME_MACHINE="$cputype"
- fi
- echo ${UNAME_MACHINE}-unknown-plan9
- exit ;;
- *:TOPS-10:*:*)
- echo pdp10-unknown-tops10
- exit ;;
- *:TENEX:*:*)
- echo pdp10-unknown-tenex
- exit ;;
- KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
- echo pdp10-dec-tops20
- exit ;;
- XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
- echo pdp10-xkl-tops20
- exit ;;
- *:TOPS-20:*:*)
- echo pdp10-unknown-tops20
- exit ;;
- *:ITS:*:*)
- echo pdp10-unknown-its
- exit ;;
- SEI:*:*:SEIUX)
- echo mips-sei-seiux${UNAME_RELEASE}
- exit ;;
- *:DragonFly:*:*)
- echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
- exit ;;
- *:*VMS:*:*)
- UNAME_MACHINE=`(uname -p) 2>/dev/null`
- case "${UNAME_MACHINE}" in
- A*) echo alpha-dec-vms ; exit ;;
- I*) echo ia64-dec-vms ; exit ;;
- V*) echo vax-dec-vms ; exit ;;
- esac ;;
- *:XENIX:*:SysV)
- echo i386-pc-xenix
- exit ;;
- i*86:skyos:*:*)
- echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
- exit ;;
-esac
-
-#echo '(No uname command or uname output not recognized.)' 1>&2
-#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
-
-eval $set_cc_for_build
-cat >$dummy.c <<EOF
-#ifdef _SEQUENT_
-# include <sys/types.h>
-# include <sys/utsname.h>
-#endif
-main ()
-{
-#if defined (sony)
-#if defined (MIPSEB)
- /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed,
- I don't know.... */
- printf ("mips-sony-bsd\n"); exit (0);
-#else
-#include <sys/param.h>
- printf ("m68k-sony-newsos%s\n",
-#ifdef NEWSOS4
- "4"
-#else
- ""
-#endif
- ); exit (0);
-#endif
-#endif
-
-#if defined (__arm) && defined (__acorn) && defined (__unix)
- printf ("arm-acorn-riscix\n"); exit (0);
-#endif
-
-#if defined (hp300) && !defined (hpux)
- printf ("m68k-hp-bsd\n"); exit (0);
-#endif
-
-#if defined (NeXT)
-#if !defined (__ARCHITECTURE__)
-#define __ARCHITECTURE__ "m68k"
-#endif
- int version;
- version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
- if (version < 4)
- printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
- else
- printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
- exit (0);
-#endif
-
-#if defined (MULTIMAX) || defined (n16)
-#if defined (UMAXV)
- printf ("ns32k-encore-sysv\n"); exit (0);
-#else
-#if defined (CMU)
- printf ("ns32k-encore-mach\n"); exit (0);
-#else
- printf ("ns32k-encore-bsd\n"); exit (0);
-#endif
-#endif
-#endif
-
-#if defined (__386BSD__)
- printf ("i386-pc-bsd\n"); exit (0);
-#endif
-
-#if defined (sequent)
-#if defined (i386)
- printf ("i386-sequent-dynix\n"); exit (0);
-#endif
-#if defined (ns32000)
- printf ("ns32k-sequent-dynix\n"); exit (0);
-#endif
-#endif
-
-#if defined (_SEQUENT_)
- struct utsname un;
-
- uname(&un);
-
- if (strncmp(un.version, "V2", 2) == 0) {
- printf ("i386-sequent-ptx2\n"); exit (0);
- }
- if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
- printf ("i386-sequent-ptx1\n"); exit (0);
- }
- printf ("i386-sequent-ptx\n"); exit (0);
-
-#endif
-
-#if defined (vax)
-# if !defined (ultrix)
-# include <sys/param.h>
-# if defined (BSD)
-# if BSD == 43
- printf ("vax-dec-bsd4.3\n"); exit (0);
-# else
-# if BSD == 199006
- printf ("vax-dec-bsd4.3reno\n"); exit (0);
-# else
- printf ("vax-dec-bsd\n"); exit (0);
-# endif
-# endif
-# else
- printf ("vax-dec-bsd\n"); exit (0);
-# endif
-# else
- printf ("vax-dec-ultrix\n"); exit (0);
-# endif
-#endif
-
-#if defined (alliant) && defined (i860)
- printf ("i860-alliant-bsd\n"); exit (0);
-#endif
-
- exit (1);
-}
-EOF
-
-$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
- { echo "$SYSTEM_NAME"; exit; }
-
-# Apollos put the system type in the environment.
-
-test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
-
-# Convex versions that predate uname can use getsysinfo(1)
-
-if [ -x /usr/convex/getsysinfo ]
-then
- case `getsysinfo -f cpu_type` in
- c1*)
- echo c1-convex-bsd
- exit ;;
- c2*)
- if getsysinfo -f scalar_acc
- then echo c32-convex-bsd
- else echo c2-convex-bsd
- fi
- exit ;;
- c34*)
- echo c34-convex-bsd
- exit ;;
- c38*)
- echo c38-convex-bsd
- exit ;;
- c4*)
- echo c4-convex-bsd
- exit ;;
- esac
-fi
-
-cat >&2 <<EOF
-$0: unable to guess system type
-
-This script, last modified $timestamp, has failed to recognize
-the operating system you are using. It is advised that you
-download the most up to date version of the config scripts from
-
- http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.guess
-and
- http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.sub
-
-If the version you run ($0) is already up to date, please
-send the following data and any information you think might be
-pertinent to <config-patches@gnu.org> in order to provide the needed
-information to handle your system.
-
-config.guess timestamp = $timestamp
-
-uname -m = `(uname -m) 2>/dev/null || echo unknown`
-uname -r = `(uname -r) 2>/dev/null || echo unknown`
-uname -s = `(uname -s) 2>/dev/null || echo unknown`
-uname -v = `(uname -v) 2>/dev/null || echo unknown`
-
-/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
-/bin/uname -X = `(/bin/uname -X) 2>/dev/null`
-
-hostinfo = `(hostinfo) 2>/dev/null`
-/bin/universe = `(/bin/universe) 2>/dev/null`
-/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null`
-/bin/arch = `(/bin/arch) 2>/dev/null`
-/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null`
-/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
-
-UNAME_MACHINE = ${UNAME_MACHINE}
-UNAME_RELEASE = ${UNAME_RELEASE}
-UNAME_SYSTEM = ${UNAME_SYSTEM}
-UNAME_VERSION = ${UNAME_VERSION}
-EOF
-
-exit 1
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:
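
The script deleted above is the stock GNU config.guess: run with no arguments, it prints a single CPU-VENDOR-OS triplet on stdout, which the autotools build then canonicalized via config.sub. A minimal sketch of how it was exercised before this commit; the triplet shown is illustrative and depends on the host machine:

    # From the root of a pre-bjam checkout; output varies by machine.
    $ ./config.guess
    x86_64-unknown-linux-gnu
    # configure consumed the same value to identify the build host, e.g.:
    $ ./configure --build=`./config.guess`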
diff --git a/config.h.in b/config.h.in
deleted file mode 100644
index 86126d432..000000000
--- a/config.h.in
+++ /dev/null
@@ -1,122 +0,0 @@
-/* config.h.in. Generated from configure.in by autoheader. */
-
-/* Defined if the requested minimum BOOST version is satisfied */
-#undef HAVE_BOOST
-
-/* Define to 1 if you have <boost/archive/text_oarchive.hpp> */
-#undef HAVE_BOOST_ARCHIVE_TEXT_OARCHIVE_HPP
-
-/* Define to 1 if you have <boost/mpi/communicator.hpp> */
-#undef HAVE_BOOST_MPI_COMMUNICATOR_HPP
-
-/* Define to 1 if you have <boost/program_options.hpp> */
-#undef HAVE_BOOST_PROGRAM_OPTIONS_HPP
-
-/* Define to 1 if you have <boost/scoped_ptr.hpp> */
-#undef HAVE_BOOST_SCOPED_PTR_HPP
-
-/* Define to 1 if you have <boost/shared_ptr.hpp> */
-#undef HAVE_BOOST_SHARED_PTR_HPP
-
-/* Define to 1 if you have <boost/thread.hpp> */
-#undef HAVE_BOOST_THREAD_HPP
-
-/* Define to 1 if you have the <dlfcn.h> header file. */
-#undef HAVE_DLFCN_H
-
-/* flag for DMapLM */
-#undef HAVE_DMAPLM
-
-/* Define to 1 if you have the <getopt.h> header file. */
-#undef HAVE_GETOPT_H
-
-/* Define to 1 if you have the <inttypes.h> header file. */
-#undef HAVE_INTTYPES_H
-
-/* flag for IRSTLM */
-#undef HAVE_IRSTLM
-
-/* Define to 1 if you have the `oolm' library (-loolm). */
-#undef HAVE_LIBOOLM
-
-/* Define to 1 if you have the `tcmalloc' library (-ltcmalloc). */
-#undef HAVE_LIBTCMALLOC
-
-/* Define to 1 if you have the <memory.h> header file. */
-#undef HAVE_MEMORY_H
-
-/* Define to 1 if you have the <nl-cpt.h> header file. */
-#undef HAVE_NL_CPT_H
-
-/* flag for ORLM */
-#undef HAVE_ORLM
-
-/* flag for protobuf */
-#undef HAVE_PROTOBUF
-
-/* flag for RandLM */
-#undef HAVE_RANDLM
-
-/* flag for SRILM */
-#undef HAVE_SRILM
-
-/* Define to 1 if you have the <stdint.h> header file. */
-#undef HAVE_STDINT_H
-
-/* Define to 1 if you have the <stdlib.h> header file. */
-#undef HAVE_STDLIB_H
-
-/* Define to 1 if you have the <strings.h> header file. */
-#undef HAVE_STRINGS_H
-
-/* Define to 1 if you have the <string.h> header file. */
-#undef HAVE_STRING_H
-
-/* flag for Syntactic Parser */
-#undef HAVE_SYNLM
-
-/* Define to 1 if you have the <sys/stat.h> header file. */
-#undef HAVE_SYS_STAT_H
-
-/* Define to 1 if you have the <sys/types.h> header file. */
-#undef HAVE_SYS_TYPES_H
-
-/* Define to 1 if you have the <unistd.h> header file. */
-#undef HAVE_UNISTD_H
-
-/* flag for zlib */
-#undef HAVE_ZLIB
-
-/* Define to the sub-directory in which libtool stores uninstalled libraries.
- */
-#undef LT_OBJDIR
-
-/* Define if compiling with MPI. */
-#undef MPI_ENABLED
-
-/* Name of package */
-#undef PACKAGE
-
-/* Define to the address where bug reports for this package should be sent. */
-#undef PACKAGE_BUGREPORT
-
-/* Define to the full name of this package. */
-#undef PACKAGE_NAME
-
-/* Define to the full name and version of this package. */
-#undef PACKAGE_STRING
-
-/* Define to the one symbol short name of this package. */
-#undef PACKAGE_TARNAME
-
-/* Define to the version of this package. */
-#undef PACKAGE_VERSION
-
-/* Define to 1 if you have the ANSI C header files. */
-#undef STDC_HEADERS
-
-/* Flag to enable use of Boost pool */
-#undef USE_BOOST_POOL
-
-/* Version number of package */
-#undef VERSION
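
The template deleted above (config.h.in) is autoheader output: at build time configure rewrote each #undef into a concrete #define in config.h, and the Moses sources guarded optional features on those macros (HAVE_SRILM, HAVE_IRSTLM, HAVE_BOOST, and so on). A rough sketch, assuming a hypothetical SRILM install path, of what the generated header would have carried:

    # Hypothetical configure run; /opt/srilm is a placeholder path.
    $ ./configure --with-srilm=/opt/srilm
    # config.h would then contain defines along the lines of:
    #   #define HAVE_BOOST 1
    #   #define HAVE_SRILM 1
    # while unrequested features remain as commented-out #undef lines.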
diff --git a/config.sub b/config.sub
deleted file mode 100755
index 1c366dfde..000000000
--- a/config.sub
+++ /dev/null
@@ -1,1579 +0,0 @@
-#! /bin/sh
-# Configuration validation subroutine script.
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-# 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
-
-timestamp='2005-07-08'
-
-# This file is (in principle) common to ALL GNU software.
-# The presence of a machine in this file suggests that SOME GNU software
-# can handle that machine. It does not imply ALL GNU software can.
-#
-# This file is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
-# 02110-1301, USA.
-#
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-
-# Please send patches to <config-patches@gnu.org>. Submit a context
-# diff and a properly formatted ChangeLog entry.
-#
-# Configuration subroutine to validate and canonicalize a configuration type.
-# Supply the specified configuration type as an argument.
-# If it is invalid, we print an error message on stderr and exit with code 1.
-# Otherwise, we print the canonical config type on stdout and succeed.
-
-# This file is supposed to be the same for all GNU packages
-# and recognize all the CPU types, system types and aliases
-# that are meaningful with *any* GNU software.
-# Each package is responsible for reporting which valid configurations
-# it does not support. The user should be able to distinguish
-# a failure to support a valid configuration from a meaningless
-# configuration.
-
-# The goal of this file is to map all the various variations of a given
-# machine specification into a single specification in the form:
-# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
-# or in some cases, the newer four-part form:
-# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
-# It is wrong to echo any other type of specification.
-
-me=`echo "$0" | sed -e 's,.*/,,'`
-
-usage="\
-Usage: $0 [OPTION] CPU-MFR-OPSYS
- $0 [OPTION] ALIAS
-
-Canonicalize a configuration name.
-
-Operation modes:
- -h, --help print this help, then exit
- -t, --time-stamp print date of last modification, then exit
- -v, --version print version number, then exit
-
-Report bugs and patches to <config-patches@gnu.org>."
-
-version="\
-GNU config.sub ($timestamp)
-
-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
-Free Software Foundation, Inc.
-
-This is free software; see the source for copying conditions. There is NO
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
-
-help="
-Try \`$me --help' for more information."
-
-# Parse command line
-while test $# -gt 0 ; do
- case $1 in
- --time-stamp | --time* | -t )
- echo "$timestamp" ; exit ;;
- --version | -v )
- echo "$version" ; exit ;;
- --help | --h* | -h )
- echo "$usage"; exit ;;
- -- ) # Stop option processing
- shift; break ;;
- - ) # Use stdin as input.
- break ;;
- -* )
- echo "$me: invalid option $1$help"
- exit 1 ;;
-
- *local*)
- # First pass through any local machine types.
- echo $1
- exit ;;
-
- * )
- break ;;
- esac
-done
-
-case $# in
- 0) echo "$me: missing argument$help" >&2
- exit 1;;
- 1) ;;
- *) echo "$me: too many arguments$help" >&2
- exit 1;;
-esac
-
-# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
-# Here we must recognize all the valid KERNEL-OS combinations.
-maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
-case $maybe_os in
- nto-qnx* | linux-gnu* | linux-dietlibc | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | \
- kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | storm-chaos* | os2-emx* | rtmk-nova*)
- os=-$maybe_os
- basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
- ;;
- *)
- basic_machine=`echo $1 | sed 's/-[^-]*$//'`
- if [ $basic_machine != $1 ]
- then os=`echo $1 | sed 's/.*-/-/'`
- else os=; fi
- ;;
-esac
-
-### Let's recognize common machines as not being operating systems so
-### that things like config.sub decstation-3100 work. We also
-### recognize some manufacturers as not being operating systems, so we
-### can provide default operating systems below.
-case $os in
- -sun*os*)
- # Prevent following clause from handling this invalid input.
- ;;
- -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
- -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
- -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
- -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
- -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
- -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
- -apple | -axis | -knuth | -cray)
- os=
- basic_machine=$1
- ;;
- -sim | -cisco | -oki | -wec | -winbond)
- os=
- basic_machine=$1
- ;;
- -scout)
- ;;
- -wrs)
- os=-vxworks
- basic_machine=$1
- ;;
- -chorusos*)
- os=-chorusos
- basic_machine=$1
- ;;
- -chorusrdb)
- os=-chorusrdb
- basic_machine=$1
- ;;
- -hiux*)
- os=-hiuxwe2
- ;;
- -sco5)
- os=-sco3.2v5
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco4)
- os=-sco3.2v4
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco3.2.[4-9]*)
- os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco3.2v[4-9]*)
- # Don't forget version if it is 3.2v4 or newer.
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco*)
- os=-sco3.2v2
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -udk*)
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -isc)
- os=-isc2.2
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -clix*)
- basic_machine=clipper-intergraph
- ;;
- -isc*)
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -lynx*)
- os=-lynxos
- ;;
- -ptx*)
- basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
- ;;
- -windowsnt*)
- os=`echo $os | sed -e 's/windowsnt/winnt/'`
- ;;
- -psos*)
- os=-psos
- ;;
- -mint | -mint[0-9]*)
- basic_machine=m68k-atari
- os=-mint
- ;;
-esac
-
-# Decode aliases for certain CPU-COMPANY combinations.
-case $basic_machine in
- # Recognize the basic CPU types without company name.
- # Some are omitted here because they have special meanings below.
- 1750a | 580 \
- | a29k \
- | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
- | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
- | am33_2.0 \
- | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr \
- | bfin \
- | c4x | clipper \
- | d10v | d30v | dlx | dsp16xx \
- | fr30 | frv \
- | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
- | i370 | i860 | i960 | ia64 \
- | ip2k | iq2000 \
- | m32r | m32rle | m68000 | m68k | m88k | maxq | mcore \
- | mips | mipsbe | mipseb | mipsel | mipsle \
- | mips16 \
- | mips64 | mips64el \
- | mips64vr | mips64vrel \
- | mips64orion | mips64orionel \
- | mips64vr4100 | mips64vr4100el \
- | mips64vr4300 | mips64vr4300el \
- | mips64vr5000 | mips64vr5000el \
- | mips64vr5900 | mips64vr5900el \
- | mipsisa32 | mipsisa32el \
- | mipsisa32r2 | mipsisa32r2el \
- | mipsisa64 | mipsisa64el \
- | mipsisa64r2 | mipsisa64r2el \
- | mipsisa64sb1 | mipsisa64sb1el \
- | mipsisa64sr71k | mipsisa64sr71kel \
- | mipstx39 | mipstx39el \
- | mn10200 | mn10300 \
- | ms1 \
- | msp430 \
- | ns16k | ns32k \
- | or32 \
- | pdp10 | pdp11 | pj | pjl \
- | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \
- | pyramid \
- | sh | sh[1234] | sh[24]a | sh[23]e | sh[34]eb | shbe | shle | sh[1234]le | sh3ele \
- | sh64 | sh64le \
- | sparc | sparc64 | sparc64b | sparc86x | sparclet | sparclite \
- | sparcv8 | sparcv9 | sparcv9b \
- | strongarm \
- | tahoe | thumb | tic4x | tic80 | tron \
- | v850 | v850e \
- | we32k \
- | x86 | xscale | xscalee[bl] | xstormy16 | xtensa \
- | z8k)
- basic_machine=$basic_machine-unknown
- ;;
- m32c)
- basic_machine=$basic_machine-unknown
- ;;
- m6811 | m68hc11 | m6812 | m68hc12)
- # Motorola 68HC11/12.
- basic_machine=$basic_machine-unknown
- os=-none
- ;;
- m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
- ;;
-
- # We use `pc' rather than `unknown'
- # because (1) that's what they normally are, and
- # (2) the word "unknown" tends to confuse beginning users.
- i*86 | x86_64)
- basic_machine=$basic_machine-pc
- ;;
- # Object if more than one company name word.
- *-*-*)
- echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
- exit 1
- ;;
- # Recognize the basic CPU types with company name.
- 580-* \
- | a29k-* \
- | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
- | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
- | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
- | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
- | avr-* \
- | bfin-* | bs2000-* \
- | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
- | clipper-* | craynv-* | cydra-* \
- | d10v-* | d30v-* | dlx-* \
- | elxsi-* \
- | f30[01]-* | f700-* | fr30-* | frv-* | fx80-* \
- | h8300-* | h8500-* \
- | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
- | i*86-* | i860-* | i960-* | ia64-* \
- | ip2k-* | iq2000-* \
- | m32r-* | m32rle-* \
- | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
- | m88110-* | m88k-* | maxq-* | mcore-* \
- | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
- | mips16-* \
- | mips64-* | mips64el-* \
- | mips64vr-* | mips64vrel-* \
- | mips64orion-* | mips64orionel-* \
- | mips64vr4100-* | mips64vr4100el-* \
- | mips64vr4300-* | mips64vr4300el-* \
- | mips64vr5000-* | mips64vr5000el-* \
- | mips64vr5900-* | mips64vr5900el-* \
- | mipsisa32-* | mipsisa32el-* \
- | mipsisa32r2-* | mipsisa32r2el-* \
- | mipsisa64-* | mipsisa64el-* \
- | mipsisa64r2-* | mipsisa64r2el-* \
- | mipsisa64sb1-* | mipsisa64sb1el-* \
- | mipsisa64sr71k-* | mipsisa64sr71kel-* \
- | mipstx39-* | mipstx39el-* \
- | mmix-* \
- | ms1-* \
- | msp430-* \
- | none-* | np1-* | ns16k-* | ns32k-* \
- | orion-* \
- | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
- | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \
- | pyramid-* \
- | romp-* | rs6000-* \
- | sh-* | sh[1234]-* | sh[24]a-* | sh[23]e-* | sh[34]eb-* | shbe-* \
- | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
- | sparc-* | sparc64-* | sparc64b-* | sparc86x-* | sparclet-* \
- | sparclite-* \
- | sparcv8-* | sparcv9-* | sparcv9b-* | strongarm-* | sv1-* | sx?-* \
- | tahoe-* | thumb-* \
- | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
- | tron-* \
- | v850-* | v850e-* | vax-* \
- | we32k-* \
- | x86-* | x86_64-* | xps100-* | xscale-* | xscalee[bl]-* \
- | xstormy16-* | xtensa-* \
- | ymp-* \
- | z8k-*)
- ;;
- m32c-*)
- ;;
- # Recognize the various machine names and aliases which stand
- # for a CPU type and a company and sometimes even an OS.
- 386bsd)
- basic_machine=i386-unknown
- os=-bsd
- ;;
- 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
- basic_machine=m68000-att
- ;;
- 3b*)
- basic_machine=we32k-att
- ;;
- a29khif)
- basic_machine=a29k-amd
- os=-udi
- ;;
- abacus)
- basic_machine=abacus-unknown
- ;;
- adobe68k)
- basic_machine=m68010-adobe
- os=-scout
- ;;
- alliant | fx80)
- basic_machine=fx80-alliant
- ;;
- altos | altos3068)
- basic_machine=m68k-altos
- ;;
- am29k)
- basic_machine=a29k-none
- os=-bsd
- ;;
- amd64)
- basic_machine=x86_64-pc
- ;;
- amd64-*)
- basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- amdahl)
- basic_machine=580-amdahl
- os=-sysv
- ;;
- amiga | amiga-*)
- basic_machine=m68k-unknown
- ;;
- amigaos | amigados)
- basic_machine=m68k-unknown
- os=-amigaos
- ;;
- amigaunix | amix)
- basic_machine=m68k-unknown
- os=-sysv4
- ;;
- apollo68)
- basic_machine=m68k-apollo
- os=-sysv
- ;;
- apollo68bsd)
- basic_machine=m68k-apollo
- os=-bsd
- ;;
- aux)
- basic_machine=m68k-apple
- os=-aux
- ;;
- balance)
- basic_machine=ns32k-sequent
- os=-dynix
- ;;
- c90)
- basic_machine=c90-cray
- os=-unicos
- ;;
- convex-c1)
- basic_machine=c1-convex
- os=-bsd
- ;;
- convex-c2)
- basic_machine=c2-convex
- os=-bsd
- ;;
- convex-c32)
- basic_machine=c32-convex
- os=-bsd
- ;;
- convex-c34)
- basic_machine=c34-convex
- os=-bsd
- ;;
- convex-c38)
- basic_machine=c38-convex
- os=-bsd
- ;;
- cray | j90)
- basic_machine=j90-cray
- os=-unicos
- ;;
- craynv)
- basic_machine=craynv-cray
- os=-unicosmp
- ;;
- cr16c)
- basic_machine=cr16c-unknown
- os=-elf
- ;;
- crds | unos)
- basic_machine=m68k-crds
- ;;
- crisv32 | crisv32-* | etraxfs*)
- basic_machine=crisv32-axis
- ;;
- cris | cris-* | etrax*)
- basic_machine=cris-axis
- ;;
- crx)
- basic_machine=crx-unknown
- os=-elf
- ;;
- da30 | da30-*)
- basic_machine=m68k-da30
- ;;
- decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
- basic_machine=mips-dec
- ;;
- decsystem10* | dec10*)
- basic_machine=pdp10-dec
- os=-tops10
- ;;
- decsystem20* | dec20*)
- basic_machine=pdp10-dec
- os=-tops20
- ;;
- delta | 3300 | motorola-3300 | motorola-delta \
- | 3300-motorola | delta-motorola)
- basic_machine=m68k-motorola
- ;;
- delta88)
- basic_machine=m88k-motorola
- os=-sysv3
- ;;
- djgpp)
- basic_machine=i586-pc
- os=-msdosdjgpp
- ;;
- dpx20 | dpx20-*)
- basic_machine=rs6000-bull
- os=-bosx
- ;;
- dpx2* | dpx2*-bull)
- basic_machine=m68k-bull
- os=-sysv3
- ;;
- ebmon29k)
- basic_machine=a29k-amd
- os=-ebmon
- ;;
- elxsi)
- basic_machine=elxsi-elxsi
- os=-bsd
- ;;
- encore | umax | mmax)
- basic_machine=ns32k-encore
- ;;
- es1800 | OSE68k | ose68k | ose | OSE)
- basic_machine=m68k-ericsson
- os=-ose
- ;;
- fx2800)
- basic_machine=i860-alliant
- ;;
- genix)
- basic_machine=ns32k-ns
- ;;
- gmicro)
- basic_machine=tron-gmicro
- os=-sysv
- ;;
- go32)
- basic_machine=i386-pc
- os=-go32
- ;;
- h3050r* | hiux*)
- basic_machine=hppa1.1-hitachi
- os=-hiuxwe2
- ;;
- h8300hms)
- basic_machine=h8300-hitachi
- os=-hms
- ;;
- h8300xray)
- basic_machine=h8300-hitachi
- os=-xray
- ;;
- h8500hms)
- basic_machine=h8500-hitachi
- os=-hms
- ;;
- harris)
- basic_machine=m88k-harris
- os=-sysv3
- ;;
- hp300-*)
- basic_machine=m68k-hp
- ;;
- hp300bsd)
- basic_machine=m68k-hp
- os=-bsd
- ;;
- hp300hpux)
- basic_machine=m68k-hp
- os=-hpux
- ;;
- hp3k9[0-9][0-9] | hp9[0-9][0-9])
- basic_machine=hppa1.0-hp
- ;;
- hp9k2[0-9][0-9] | hp9k31[0-9])
- basic_machine=m68000-hp
- ;;
- hp9k3[2-9][0-9])
- basic_machine=m68k-hp
- ;;
- hp9k6[0-9][0-9] | hp6[0-9][0-9])
- basic_machine=hppa1.0-hp
- ;;
- hp9k7[0-79][0-9] | hp7[0-79][0-9])
- basic_machine=hppa1.1-hp
- ;;
- hp9k78[0-9] | hp78[0-9])
- # FIXME: really hppa2.0-hp
- basic_machine=hppa1.1-hp
- ;;
- hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
- # FIXME: really hppa2.0-hp
- basic_machine=hppa1.1-hp
- ;;
- hp9k8[0-9][13679] | hp8[0-9][13679])
- basic_machine=hppa1.1-hp
- ;;
- hp9k8[0-9][0-9] | hp8[0-9][0-9])
- basic_machine=hppa1.0-hp
- ;;
- hppa-next)
- os=-nextstep3
- ;;
- hppaosf)
- basic_machine=hppa1.1-hp
- os=-osf
- ;;
- hppro)
- basic_machine=hppa1.1-hp
- os=-proelf
- ;;
- i370-ibm* | ibm*)
- basic_machine=i370-ibm
- ;;
-# I'm not sure what "Sysv32" means. Should this be sysv3.2?
- i*86v32)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-sysv32
- ;;
- i*86v4*)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-sysv4
- ;;
- i*86v)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-sysv
- ;;
- i*86sol2)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-solaris2
- ;;
- i386mach)
- basic_machine=i386-mach
- os=-mach
- ;;
- i386-vsta | vsta)
- basic_machine=i386-unknown
- os=-vsta
- ;;
- iris | iris4d)
- basic_machine=mips-sgi
- case $os in
- -irix*)
- ;;
- *)
- os=-irix4
- ;;
- esac
- ;;
- isi68 | isi)
- basic_machine=m68k-isi
- os=-sysv
- ;;
- m88k-omron*)
- basic_machine=m88k-omron
- ;;
- magnum | m3230)
- basic_machine=mips-mips
- os=-sysv
- ;;
- merlin)
- basic_machine=ns32k-utek
- os=-sysv
- ;;
- mingw32)
- basic_machine=i386-pc
- os=-mingw32
- ;;
- miniframe)
- basic_machine=m68000-convergent
- ;;
- *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
- basic_machine=m68k-atari
- os=-mint
- ;;
- mips3*-*)
- basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
- ;;
- mips3*)
- basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
- ;;
- monitor)
- basic_machine=m68k-rom68k
- os=-coff
- ;;
- morphos)
- basic_machine=powerpc-unknown
- os=-morphos
- ;;
- msdos)
- basic_machine=i386-pc
- os=-msdos
- ;;
- mvs)
- basic_machine=i370-ibm
- os=-mvs
- ;;
- ncr3000)
- basic_machine=i486-ncr
- os=-sysv4
- ;;
- netbsd386)
- basic_machine=i386-unknown
- os=-netbsd
- ;;
- netwinder)
- basic_machine=armv4l-rebel
- os=-linux
- ;;
- news | news700 | news800 | news900)
- basic_machine=m68k-sony
- os=-newsos
- ;;
- news1000)
- basic_machine=m68030-sony
- os=-newsos
- ;;
- news-3600 | risc-news)
- basic_machine=mips-sony
- os=-newsos
- ;;
- necv70)
- basic_machine=v70-nec
- os=-sysv
- ;;
- next | m*-next )
- basic_machine=m68k-next
- case $os in
- -nextstep* )
- ;;
- -ns2*)
- os=-nextstep2
- ;;
- *)
- os=-nextstep3
- ;;
- esac
- ;;
- nh3000)
- basic_machine=m68k-harris
- os=-cxux
- ;;
- nh[45]000)
- basic_machine=m88k-harris
- os=-cxux
- ;;
- nindy960)
- basic_machine=i960-intel
- os=-nindy
- ;;
- mon960)
- basic_machine=i960-intel
- os=-mon960
- ;;
- nonstopux)
- basic_machine=mips-compaq
- os=-nonstopux
- ;;
- np1)
- basic_machine=np1-gould
- ;;
- nsr-tandem)
- basic_machine=nsr-tandem
- ;;
- op50n-* | op60c-*)
- basic_machine=hppa1.1-oki
- os=-proelf
- ;;
- openrisc | openrisc-*)
- basic_machine=or32-unknown
- ;;
- os400)
- basic_machine=powerpc-ibm
- os=-os400
- ;;
- OSE68000 | ose68000)
- basic_machine=m68000-ericsson
- os=-ose
- ;;
- os68k)
- basic_machine=m68k-none
- os=-os68k
- ;;
- pa-hitachi)
- basic_machine=hppa1.1-hitachi
- os=-hiuxwe2
- ;;
- paragon)
- basic_machine=i860-intel
- os=-osf
- ;;
- pbd)
- basic_machine=sparc-tti
- ;;
- pbb)
- basic_machine=m68k-tti
- ;;
- pc532 | pc532-*)
- basic_machine=ns32k-pc532
- ;;
- pentium | p5 | k5 | k6 | nexgen | viac3)
- basic_machine=i586-pc
- ;;
- pentiumpro | p6 | 6x86 | athlon | athlon_*)
- basic_machine=i686-pc
- ;;
- pentiumii | pentium2 | pentiumiii | pentium3)
- basic_machine=i686-pc
- ;;
- pentium4)
- basic_machine=i786-pc
- ;;
- pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
- basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentiumpro-* | p6-* | 6x86-* | athlon-*)
- basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
- basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentium4-*)
- basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pn)
- basic_machine=pn-gould
- ;;
- power) basic_machine=power-ibm
- ;;
- ppc) basic_machine=powerpc-unknown
- ;;
- ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ppcle | powerpclittle | ppc-le | powerpc-little)
- basic_machine=powerpcle-unknown
- ;;
- ppcle-* | powerpclittle-*)
- basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ppc64) basic_machine=powerpc64-unknown
- ;;
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ppc64le | powerpc64little | ppc64-le | powerpc64-little)
- basic_machine=powerpc64le-unknown
- ;;
- ppc64le-* | powerpc64little-*)
- basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ps2)
- basic_machine=i386-ibm
- ;;
- pw32)
- basic_machine=i586-unknown
- os=-pw32
- ;;
- rom68k)
- basic_machine=m68k-rom68k
- os=-coff
- ;;
- rm[46]00)
- basic_machine=mips-siemens
- ;;
- rtpc | rtpc-*)
- basic_machine=romp-ibm
- ;;
- s390 | s390-*)
- basic_machine=s390-ibm
- ;;
- s390x | s390x-*)
- basic_machine=s390x-ibm
- ;;
- sa29200)
- basic_machine=a29k-amd
- os=-udi
- ;;
- sb1)
- basic_machine=mipsisa64sb1-unknown
- ;;
- sb1el)
- basic_machine=mipsisa64sb1el-unknown
- ;;
- sei)
- basic_machine=mips-sei
- os=-seiux
- ;;
- sequent)
- basic_machine=i386-sequent
- ;;
- sh)
- basic_machine=sh-hitachi
- os=-hms
- ;;
- sh64)
- basic_machine=sh64-unknown
- ;;
- sparclite-wrs | simso-wrs)
- basic_machine=sparclite-wrs
- os=-vxworks
- ;;
- sps7)
- basic_machine=m68k-bull
- os=-sysv2
- ;;
- spur)
- basic_machine=spur-unknown
- ;;
- st2000)
- basic_machine=m68k-tandem
- ;;
- stratus)
- basic_machine=i860-stratus
- os=-sysv4
- ;;
- sun2)
- basic_machine=m68000-sun
- ;;
- sun2os3)
- basic_machine=m68000-sun
- os=-sunos3
- ;;
- sun2os4)
- basic_machine=m68000-sun
- os=-sunos4
- ;;
- sun3os3)
- basic_machine=m68k-sun
- os=-sunos3
- ;;
- sun3os4)
- basic_machine=m68k-sun
- os=-sunos4
- ;;
- sun4os3)
- basic_machine=sparc-sun
- os=-sunos3
- ;;
- sun4os4)
- basic_machine=sparc-sun
- os=-sunos4
- ;;
- sun4sol2)
- basic_machine=sparc-sun
- os=-solaris2
- ;;
- sun3 | sun3-*)
- basic_machine=m68k-sun
- ;;
- sun4)
- basic_machine=sparc-sun
- ;;
- sun386 | sun386i | roadrunner)
- basic_machine=i386-sun
- ;;
- sv1)
- basic_machine=sv1-cray
- os=-unicos
- ;;
- symmetry)
- basic_machine=i386-sequent
- os=-dynix
- ;;
- t3e)
- basic_machine=alphaev5-cray
- os=-unicos
- ;;
- t90)
- basic_machine=t90-cray
- os=-unicos
- ;;
- tic54x | c54x*)
- basic_machine=tic54x-unknown
- os=-coff
- ;;
- tic55x | c55x*)
- basic_machine=tic55x-unknown
- os=-coff
- ;;
- tic6x | c6x*)
- basic_machine=tic6x-unknown
- os=-coff
- ;;
- tx39)
- basic_machine=mipstx39-unknown
- ;;
- tx39el)
- basic_machine=mipstx39el-unknown
- ;;
- toad1)
- basic_machine=pdp10-xkl
- os=-tops20
- ;;
- tower | tower-32)
- basic_machine=m68k-ncr
- ;;
- tpf)
- basic_machine=s390x-ibm
- os=-tpf
- ;;
- udi29k)
- basic_machine=a29k-amd
- os=-udi
- ;;
- ultra3)
- basic_machine=a29k-nyu
- os=-sym1
- ;;
- v810 | necv810)
- basic_machine=v810-nec
- os=-none
- ;;
- vaxv)
- basic_machine=vax-dec
- os=-sysv
- ;;
- vms)
- basic_machine=vax-dec
- os=-vms
- ;;
- vpp*|vx|vx-*)
- basic_machine=f301-fujitsu
- ;;
- vxworks960)
- basic_machine=i960-wrs
- os=-vxworks
- ;;
- vxworks68)
- basic_machine=m68k-wrs
- os=-vxworks
- ;;
- vxworks29k)
- basic_machine=a29k-wrs
- os=-vxworks
- ;;
- w65*)
- basic_machine=w65-wdc
- os=-none
- ;;
- w89k-*)
- basic_machine=hppa1.1-winbond
- os=-proelf
- ;;
- xbox)
- basic_machine=i686-pc
- os=-mingw32
- ;;
- xps | xps100)
- basic_machine=xps100-honeywell
- ;;
- ymp)
- basic_machine=ymp-cray
- os=-unicos
- ;;
- z8k-*-coff)
- basic_machine=z8k-unknown
- os=-sim
- ;;
- none)
- basic_machine=none-none
- os=-none
- ;;
-
-# Here we handle the default manufacturer of certain CPU types. It is in
-# some cases the only manufacturer, in others, it is the most popular.
- w89k)
- basic_machine=hppa1.1-winbond
- ;;
- op50n)
- basic_machine=hppa1.1-oki
- ;;
- op60c)
- basic_machine=hppa1.1-oki
- ;;
- romp)
- basic_machine=romp-ibm
- ;;
- mmix)
- basic_machine=mmix-knuth
- ;;
- rs6000)
- basic_machine=rs6000-ibm
- ;;
- vax)
- basic_machine=vax-dec
- ;;
- pdp10)
- # there are many clones, so DEC is not a safe bet
- basic_machine=pdp10-unknown
- ;;
- pdp11)
- basic_machine=pdp11-dec
- ;;
- we32k)
- basic_machine=we32k-att
- ;;
- sh[1234] | sh[24]a | sh[34]eb | sh[1234]le | sh[23]ele)
- basic_machine=sh-unknown
- ;;
- sparc | sparcv8 | sparcv9 | sparcv9b)
- basic_machine=sparc-sun
- ;;
- cydra)
- basic_machine=cydra-cydrome
- ;;
- orion)
- basic_machine=orion-highlevel
- ;;
- orion105)
- basic_machine=clipper-highlevel
- ;;
- mac | mpw | mac-mpw)
- basic_machine=m68k-apple
- ;;
- pmac | pmac-mpw)
- basic_machine=powerpc-apple
- ;;
- *-unknown)
- # Make sure to match an already-canonicalized machine name.
- ;;
- *)
- echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
- exit 1
- ;;
-esac
-
-# Here we canonicalize certain aliases for manufacturers.
-case $basic_machine in
- *-digital*)
- basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
- ;;
- *-commodore*)
- basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
- ;;
- *)
- ;;
-esac
-
-# Decode manufacturer-specific aliases for certain operating systems.
-
-if [ x"$os" != x"" ]
-then
-case $os in
- # First match some system type aliases
- # that might get confused with valid system types.
- # -solaris* is a basic system type, with this one exception.
- -solaris1 | -solaris1.*)
- os=`echo $os | sed -e 's|solaris1|sunos4|'`
- ;;
- -solaris)
- os=-solaris2
- ;;
- -svr4*)
- os=-sysv4
- ;;
- -unixware*)
- os=-sysv4.2uw
- ;;
- -gnu/linux*)
- os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
- ;;
- # First accept the basic system types.
- # The portable systems comes first.
- # Each alternative MUST END IN A *, to match a version number.
- # -sysv* is not here because it comes later, after sysvr4.
- -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
- | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\
- | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \
- | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
- | -aos* \
- | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
- | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
- | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* | -openbsd* \
- | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
- | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
- | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
- | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
- | -chorusos* | -chorusrdb* \
- | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
- | -mingw32* | -linux-gnu* | -linux-uclibc* | -uxpv* | -beos* | -mpeix* | -udk* \
- | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
- | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
- | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
- | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
- | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
- | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
- | -skyos* | -haiku*)
- # Remember, each alternative MUST END IN *, to match a version number.
- ;;
- -qnx*)
- case $basic_machine in
- x86-* | i*86-*)
- ;;
- *)
- os=-nto$os
- ;;
- esac
- ;;
- -nto-qnx*)
- ;;
- -nto*)
- os=`echo $os | sed -e 's|nto|nto-qnx|'`
- ;;
- -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
- | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
- | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
- ;;
- -mac*)
- os=`echo $os | sed -e 's|mac|macos|'`
- ;;
- -linux-dietlibc)
- os=-linux-dietlibc
- ;;
- -linux*)
- os=`echo $os | sed -e 's|linux|linux-gnu|'`
- ;;
- -sunos5*)
- os=`echo $os | sed -e 's|sunos5|solaris2|'`
- ;;
- -sunos6*)
- os=`echo $os | sed -e 's|sunos6|solaris3|'`
- ;;
- -opened*)
- os=-openedition
- ;;
- -os400*)
- os=-os400
- ;;
- -wince*)
- os=-wince
- ;;
- -osfrose*)
- os=-osfrose
- ;;
- -osf*)
- os=-osf
- ;;
- -utek*)
- os=-bsd
- ;;
- -dynix*)
- os=-bsd
- ;;
- -acis*)
- os=-aos
- ;;
- -atheos*)
- os=-atheos
- ;;
- -syllable*)
- os=-syllable
- ;;
- -386bsd)
- os=-bsd
- ;;
- -ctix* | -uts*)
- os=-sysv
- ;;
- -nova*)
- os=-rtmk-nova
- ;;
- -ns2 )
- os=-nextstep2
- ;;
- -nsk*)
- os=-nsk
- ;;
- # Preserve the version number of sinix5.
- -sinix5.*)
- os=`echo $os | sed -e 's|sinix|sysv|'`
- ;;
- -sinix*)
- os=-sysv4
- ;;
- -tpf*)
- os=-tpf
- ;;
- -triton*)
- os=-sysv3
- ;;
- -oss*)
- os=-sysv3
- ;;
- -svr4)
- os=-sysv4
- ;;
- -svr3)
- os=-sysv3
- ;;
- -sysvr4)
- os=-sysv4
- ;;
- # This must come after -sysvr4.
- -sysv*)
- ;;
- -ose*)
- os=-ose
- ;;
- -es1800*)
- os=-ose
- ;;
- -xenix)
- os=-xenix
- ;;
- -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
- os=-mint
- ;;
- -aros*)
- os=-aros
- ;;
- -kaos*)
- os=-kaos
- ;;
- -zvmoe)
- os=-zvmoe
- ;;
- -none)
- ;;
- *)
- # Get rid of the `-' at the beginning of $os.
- os=`echo $os | sed 's/[^-]*-//'`
- echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
- exit 1
- ;;
-esac
-else
-
-# Here we handle the default operating systems that come with various machines.
-# The value should be what the vendor currently ships out the door with their
-# machine or put another way, the most popular os provided with the machine.
-
-# Note that if you're going to try to match "-MANUFACTURER" here (say,
-# "-sun"), then you have to tell the case statement up towards the top
-# that MANUFACTURER isn't an operating system. Otherwise, code above
-# will signal an error saying that MANUFACTURER isn't an operating
-# system, and we'll never get to this point.
-
-case $basic_machine in
- *-acorn)
- os=-riscix1.2
- ;;
- arm*-rebel)
- os=-linux
- ;;
- arm*-semi)
- os=-aout
- ;;
- c4x-* | tic4x-*)
- os=-coff
- ;;
- # This must come before the *-dec entry.
- pdp10-*)
- os=-tops20
- ;;
- pdp11-*)
- os=-none
- ;;
- *-dec | vax-*)
- os=-ultrix4.2
- ;;
- m68*-apollo)
- os=-domain
- ;;
- i386-sun)
- os=-sunos4.0.2
- ;;
- m68000-sun)
- os=-sunos3
- # This also exists in the configure program, but was not the
- # default.
- # os=-sunos4
- ;;
- m68*-cisco)
- os=-aout
- ;;
- mips*-cisco)
- os=-elf
- ;;
- mips*-*)
- os=-elf
- ;;
- or32-*)
- os=-coff
- ;;
- *-tti) # must be before sparc entry or we get the wrong os.
- os=-sysv3
- ;;
- sparc-* | *-sun)
- os=-sunos4.1.1
- ;;
- *-be)
- os=-beos
- ;;
- *-haiku)
- os=-haiku
- ;;
- *-ibm)
- os=-aix
- ;;
- *-knuth)
- os=-mmixware
- ;;
- *-wec)
- os=-proelf
- ;;
- *-winbond)
- os=-proelf
- ;;
- *-oki)
- os=-proelf
- ;;
- *-hp)
- os=-hpux
- ;;
- *-hitachi)
- os=-hiux
- ;;
- i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
- os=-sysv
- ;;
- *-cbm)
- os=-amigaos
- ;;
- *-dg)
- os=-dgux
- ;;
- *-dolphin)
- os=-sysv3
- ;;
- m68k-ccur)
- os=-rtu
- ;;
- m88k-omron*)
- os=-luna
- ;;
- *-next )
- os=-nextstep
- ;;
- *-sequent)
- os=-ptx
- ;;
- *-crds)
- os=-unos
- ;;
- *-ns)
- os=-genix
- ;;
- i370-*)
- os=-mvs
- ;;
- *-next)
- os=-nextstep3
- ;;
- *-gould)
- os=-sysv
- ;;
- *-highlevel)
- os=-bsd
- ;;
- *-encore)
- os=-bsd
- ;;
- *-sgi)
- os=-irix
- ;;
- *-siemens)
- os=-sysv4
- ;;
- *-masscomp)
- os=-rtu
- ;;
- f30[01]-fujitsu | f700-fujitsu)
- os=-uxpv
- ;;
- *-rom68k)
- os=-coff
- ;;
- *-*bug)
- os=-coff
- ;;
- *-apple)
- os=-macos
- ;;
- *-atari*)
- os=-mint
- ;;
- *)
- os=-none
- ;;
-esac
-fi
-
-# Here we handle the case where we know the os, and the CPU type, but not the
-# manufacturer. We pick the logical manufacturer.
-vendor=unknown
-case $basic_machine in
- *-unknown)
- case $os in
- -riscix*)
- vendor=acorn
- ;;
- -sunos*)
- vendor=sun
- ;;
- -aix*)
- vendor=ibm
- ;;
- -beos*)
- vendor=be
- ;;
- -hpux*)
- vendor=hp
- ;;
- -mpeix*)
- vendor=hp
- ;;
- -hiux*)
- vendor=hitachi
- ;;
- -unos*)
- vendor=crds
- ;;
- -dgux*)
- vendor=dg
- ;;
- -luna*)
- vendor=omron
- ;;
- -genix*)
- vendor=ns
- ;;
- -mvs* | -opened*)
- vendor=ibm
- ;;
- -os400*)
- vendor=ibm
- ;;
- -ptx*)
- vendor=sequent
- ;;
- -tpf*)
- vendor=ibm
- ;;
- -vxsim* | -vxworks* | -windiss*)
- vendor=wrs
- ;;
- -aux*)
- vendor=apple
- ;;
- -hms*)
- vendor=hitachi
- ;;
- -mpw* | -macos*)
- vendor=apple
- ;;
- -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
- vendor=atari
- ;;
- -vos*)
- vendor=stratus
- ;;
- esac
- basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
- ;;
-esac
-
-echo $basic_machine$os
-exit
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:
diff --git a/configure.in b/configure.in
deleted file mode 100644
index 453f982a6..000000000
--- a/configure.in
+++ /dev/null
@@ -1,364 +0,0 @@
-AC_INIT(moses/src)
-
-AM_CONFIG_HEADER(config.h)
-AM_INIT_AUTOMAKE(moses, 0.1)
-
-AC_CONFIG_MACRO_DIR([m4])
-
-AC_PROG_CXX
-AC_PROG_CXXCPP
-AC_LANG_CPLUSPLUS
-
-AC_DISABLE_SHARED
-AC_PROG_LIBTOOL
-# Shared libraries are disabled by default
-#LT_INIT([disable-shared])
-
-AX_XMLRPC_C
-BOOST_REQUIRE([1.37.0])
-BOOST_SMART_PTR
-BOOST_PROGRAM_OPTIONS
-
-ac_have_mpi=no
-AC_ARG_WITH(mpi,
- AC_HELP_STRING([--with-mpi],
- [Force compilation with MPI]),
- [ if test $withval != no ; then
- ac_have_mpi=yes
- fi ] )
-if test $ac_have_mpi = yes ; then
- AC_PATH_PROG(CXX, mpic++, none)
- if test $CXX = none ; then
- AC_MSG_ERROR([Cannot locate MPI compiler drivers])
- fi
- BOOST_SERIALIZATION
- BOOST_MPI
- AC_DEFINE(MPI_ENABLED,1,[Define if compiling with MPI.])
- CPPFLAGS="$CPPFLAGS -DMPI_ENABLE=1"
-fi
-
-
-AC_ARG_WITH(protobuf,
- [AC_HELP_STRING([--with-protobuf=PATH], [(optional) path to Google protobuf])],
- [with_protobuf=$withval],
- [with_protobuf=no]
- )
-
-AC_ARG_WITH(srilm,
- [AC_HELP_STRING([--with-srilm=PATH], [(optional) path to SRI's LM toolkit])],
- [with_srilm=$withval],
- [with_srilm=no]
- )
-
-AC_ARG_WITH(srilm-dynamic,
- [AC_HELP_STRING([--with-srilm-dynamic], [(optional) link dynamically with srilm])],
- [with_srilm_dynamic=yes],
- [with_srilm_dynamic=no]
- )
-
-AC_ARG_WITH(srilm-arch,
- [AC_HELP_STRING([--with-srilm-arch=ARCH], [(optional) architecture for which SRILM was built])],
- [with_srilm_arch=$withval],
- [with_srilm_arch=no]
- )
-
-
-AC_ARG_WITH(irstlm,
- [AC_HELP_STRING([--with-irstlm=PATH], [(optional) path to IRST's LM toolkit])],
- [with_irstlm=$withval],
- [with_irstlm=no]
- )
-
-AC_ARG_WITH(randlm,
- [AC_HELP_STRING([--with-randlm=PATH], [(optional) path to RandLM toolkit])],
- [with_randlm=$withval],
- [with_randlm=no]
- )
-AC_ARG_WITH(orlm,
- [AC_HELP_STRING([--with-orlm=PATH], [(optional) path to ORLM])],
- [with_orlm=$withval],
- [with_orlm=no]
- )
-AC_ARG_WITH(dmaplm,
- [AC_HELP_STRING([--with-dmaplm=PATH], [(optional) path to DMapLM])],
- [with_dmaplm=$withval],
- [with_dmaplm=no]
- )
-
-AC_ARG_WITH(synlm,
- [AC_HELP_STRING([--with-synlm], [(optional) Include syntactic language model parser; default is no])],
- [with_synlm=$withval],
- [with_synlm=no]
- )
-
-AC_ARG_WITH(notrace,
- [AC_HELP_STRING([--notrace], [disable trace])],
- [without_trace=yes],
- )
-
-
-
-AC_ARG_ENABLE(profiling,
- [AC_HELP_STRING([--enable-profiling], [moses will dump profiling info])],
- [CPPFLAGS="$CPPFLAGS -pg"; LDFLAGS="$LDFLAGS -pg" ]
- )
-
-AC_ARG_ENABLE(optimization,
- [AC_HELP_STRING([--enable-optimization], [compile with -O3 flag])],
- [CPPFLAGS="$CPPFLAGS -O3"; LDFLAGS="$LDFLAGS -O3" ]
- )
-
-AC_ARG_ENABLE(threads,
- [AC_HELP_STRING([--enable-threads], [compile threadsafe library and multi-threaded moses (mosesmt)])],
- [],
- [enable_threads=no]
- )
-
-AC_ARG_ENABLE(unittest,
- [AC_HELP_STRING([--enable-unittest],[build unit tests - requires boost test])],
- [enable_unittest=yes]
- )
-
-AC_ARG_WITH(zlib,
- [AC_HELP_STRING([--with-zlib=PATH], [(optional) path to zlib])],
- [with_zlib=$withval],
- [with_zlib=no]
- )
-
-AC_ARG_WITH(tcmalloc,
- [AC_HELP_STRING([--with-tcmalloc], [(optional) link with tcmalloc; default is no])],
- [with_tcmalloc=$withval],
- [with_tcmalloc=no]
- )
-
-AC_ARG_ENABLE(boost-pool,
- [AC_HELP_STRING([--enable-boost-pool], [(optional) try to improve speed by selectively using Boost pool allocation (may increase total memory use); default is yes if Boost enabled])],
- [enable_boost_pool=yes],
- [enable_boost_pool=no]
- )
-
-
-AM_CONDITIONAL([INTERNAL_LM], false)
-AM_CONDITIONAL([SRI_LM], false)
-AM_CONDITIONAL([IRST_LM], false)
-AM_CONDITIONAL([KEN_LM], false)
-AM_CONDITIONAL([RAND_LM], false)
-AM_CONDITIONAL([ORLM_LM], false)
-AM_CONDITIONAL([DMAP_LM], false)
-AM_CONDITIONAL([SYN_LM], false)
-AM_CONDITIONAL([PROTOBUF], false)
-AM_CONDITIONAL([am__fastdepCC], false)
-AM_CONDITIONAL([WITH_THREADS],false)
-
-
-if test "x$without_trace" = 'xyes'
-then
- AC_MSG_NOTICE([trace disabled, most regression test will fail])
-else
- AC_MSG_NOTICE([trace enabled (default)])
- CPPFLAGS="$CPPFLAGS -DTRACE_ENABLE=1"
-fi
-
-if test "x$enable_threads" = 'xyes'
-then
- AC_MSG_NOTICE([Building threaded moses])
- BOOST_THREADS
- CPPFLAGS="$CPPFLAGS -DWITH_THREADS"
- AM_CONDITIONAL([WITH_THREADS],true)
-else
- AC_MSG_NOTICE([Building non-threaded moses. This will disable the moses server])
-fi
-
-AM_CONDITIONAL([WITH_UNITTEST], false)
-if test "x$enable_unittest" = 'xyes'
-then
- AC_MSG_NOTICE([Building unit tests])
- AM_CONDITIONAL([WITH_UNITTEST], true)
-fi
-
-
-if test "x$with_protobuf" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${with_protobuf}/include"
-
- AC_CHECK_HEADER(google/protobuf/message.h,
- [AC_DEFINE([HAVE_PROTOBUF], [], [flag for protobuf])],
- [AC_MSG_ERROR([Cannot find protobuf!])])
-
- LIB_PROTOBUF="-lprotobuf"
- LDFLAGS="$LDFLAGS -L${with_protobuf}/lib"
- LIBS="$LIBS $LIB_PROTOBUF"
- AC_PATH_PROG(PROTOC,protoc,,"${PATH}:${with_protobuf}/bin")
- FMTLIBS="$FMTLIBS libprotobuf.a"
- AM_CONDITIONAL([PROTOBUF], true)
-fi
-
-if test "x$with_srilm" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${with_srilm}/include"
-
- AC_CHECK_HEADER(Ngram.h,
- [AC_DEFINE([HAVE_SRILM], [], [flag for SRILM])],
- [AC_MSG_ERROR([Cannot find SRILM!])])
-
- if test "x$with_srilm_dynamic" != 'xyes'
- then
- LIB_SRILM="-loolm -ldstruct -lmisc -lflm"
- # ROOT/lib/i686-m64/liboolm.a
- # ROOT/lib/i686-m64/libdstruct.a
- # ROOT/lib/i686-m64/libmisc.a
- if test "x$with_srilm_arch" != 'xno'
- then
- MY_ARCH=${with_srilm_arch}
- else
- MY_ARCH=`${with_srilm}/sbin/machine-type`
- fi
- LDFLAGS="$LDFLAGS -L${with_srilm}/lib/${MY_ARCH} -L${with_srilm}/flm/obj/${MY_ARCH}"
- LIBS="$LIBS $LIB_SRILM"
- FMTLIBS="$FMTLIBS liboolm.a libdstruct.a libmisc.a"
- else
- LDFLAGS="$LDFLAGS -L${with_srilm}/lib"
- LIBS="$LIBS -lsrilm"
- fi
- AC_CHECK_LIB([oolm], [trigram_init], [], [AC_MSG_ERROR([Cannot find SRILM's library in ${with_srilm}/lib/${MY_ARCH} ])])
- AM_CONDITIONAL([SRI_LM], true)
-fi
-
-if test "x$with_irstlm" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${with_irstlm}/include"
-
-
- AC_MSG_NOTICE([])
- AC_MSG_NOTICE([!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!])
- AC_MSG_NOTICE([!!! You are linking the IRSTLM library; be sure the release is >= 5.70.02 !!!])
- AC_MSG_NOTICE([!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!])
- AC_MSG_NOTICE([])
-
-
- AC_CHECK_HEADER(n_gram.h,
- [AC_DEFINE([HAVE_IRSTLM], [], [flag for IRSTLM])],
- [AC_MSG_ERROR([Cannot find IRST-LM in ${with_irstlm}])])
-
- MY_ARCH=`uname -m`
- LIB_IRSTLM="-lirstlm"
- LDFLAGS="$LDFLAGS -L${with_irstlm}/lib"
- LIBS="$LIBS $LIB_IRSTLM"
- FMTLIBS="$FMTLIBS libirstlm.a"
- AM_CONDITIONAL([IRST_LM], true)
-fi
-
-CPPFLAGS="$CPPFLAGS -I\$(top_srcdir)"
-#LDFLAGS="$LDFLAGS -L\$(top_srcdir)/util -lkenutil -L\$(top_srcdir)/lm -lkenlm -lz"
-#KENUTIL_DEPS="\$(top_srcdir)/util/libkenutil.la"
-#KENLM_DEPS="\$(top_srcdir)/lm/libkenlm.la"
-#FMTLIBS="$FMTLIBS libkenutil.la libkenlm.la"
-#AC_SUBST(KENUTIL_DEPS)
-#AC_SUBST(KENLM_DEPS)
-
-if test "x$with_randlm" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${with_randlm}/include"
-
- AC_CHECK_HEADER(RandLM.h,
- [AC_DEFINE([HAVE_RANDLM], [], [flag for RandLM])],
- [AC_MSG_ERROR([Cannot find RandLM!])])
-
-
- MY_ARCH=`uname -m`
- LIB_RANDLM="-lrandlm"
- LDFLAGS="$LDFLAGS -L${with_randlm}/lib"
- LIBS="$LIBS $LIB_RANDLM"
- FMTLIBS="$FMTLIBS librandlm.a"
- AM_CONDITIONAL([RAND_LM], true)
-fi
-
-if test "x$with_dmaplm" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${with_dmaplm}/src/DMap"
-
- AC_CHECK_HEADER(StructLanguageModel.h,
- [AC_DEFINE([HAVE_DMAPLM], [], [flag for DMapLM])],
- [AC_MSG_ERROR([Cannot find DMapLM!])])
-
- LDFLAGS="$LDFLAGS -L${with_dmaplm}/src/DMap"
- LIBS="$LIBS -lDMap"
- FMTLIBS="FMTLIBS libdmap.la"
- AM_CONDITIONAL([DMAP_LM], true)
-fi
-
-if test "x$with_orlm" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${with_orlm}/"
-
- AC_CHECK_HEADER(onlineRLM.h,
-#AC_CHECK_HEADER(multiOnlineRLM.h,
- [AC_DEFINE([HAVE_ORLM], [], [flag for ORLM])],
- [AC_MSG_ERROR([Cannot find ORLM!])])
-
- MY_ARCH=`uname -m`
- AM_CONDITIONAL([ORLM_LM], true)
-fi
-if test "x$with_tcmalloc" != 'xno'
-then
- AC_CHECK_LIB([tcmalloc], [malloc], [], [AC_MSG_ERROR([Cannot find tcmalloc])])
-fi
-
-
-if test "x$enable_boost_pool" != 'xno'
-then
- AC_CHECK_HEADER(boost/pool/object_pool.hpp,
- [AC_DEFINE([USE_BOOST_POOL], [], [Flag to enable use of Boost pool])],
- [AC_MSG_WARN([Cannot find boost/pool/object_pool.hpp])]
- )
-fi
-
-if test "x$with_synlm" != 'xno'
-then
- SAVE_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS -I${PWD}/synlm/hhmm/rvtl/include -I${PWD}/synlm/hhmm/wsjparse/include -lm"
-
- AC_CHECK_HEADERS(nl-cpt.h,
- [AC_DEFINE([HAVE_SYNLM], [], [flag for Syntactic Parser])],
- [AC_MSG_ERROR([Cannot find SYNLM in ${PWD}/synlm/hhmm])])
-
- AM_CONDITIONAL([SYN_LM], true)
-
-fi
-
-
-AM_CONDITIONAL([WITH_MERT],false)
-AC_CHECK_HEADERS([getopt.h],
- [AM_CONDITIONAL([WITH_MERT],true)],
- [AC_MSG_WARN([Cannot find getopt.h - disabling new mert])])
-
-AM_CONDITIONAL([WITH_SERVER],false)
-if test "x$have_xmlrpc_c" = "xyes" && test "x$enable_threads" = "xyes"; then
- AM_CONDITIONAL([WITH_SERVER],true)
-else
- AC_MSG_NOTICE([Disabling server])
-fi
-
-if test "x$with_zlib" != 'xno'
-then
- CPPFLAGS="$CPPFLAGS -I${with_zlib}/include"
- LDFLAGS="$LDFLAGS -L${with_zlib}/lib"
-fi
-
-# zlib is always required (see ./moses/src/gzfilebuf.h)
-# TODO: This shouldn't be presented to the user as a config option if it isn't actually an option
-AC_CHECK_HEADER(zlib.h,
- [AC_DEFINE([HAVE_ZLIB], [], [flag for zlib])],
- [AC_MSG_ERROR([Cannot find zlib.h. Please install it. For Debian, try 'sudo aptitude install zlib1g-dev'])])
-LIBS="$LIBS -lz"
-
-
-AC_CONFIG_FILES(Makefile OnDiskPt/src/Makefile moses/src/Makefile moses-cmd/src/Makefile moses-chart-cmd/src/Makefile misc/Makefile mert/Makefile server/Makefile CreateOnDisk/src/Makefile util/Makefile mira/Makefile lm/Makefile unittest/Makefile)
-
-AC_OUTPUT()
diff --git a/scripts/other/Extract_TMX_Corpus/Extract_TMX_Corpus.py b/contrib/Extract_TMX_Corpus/Extract_TMX_Corpus.py
index fd67d4b3a..fd67d4b3a 100755
--- a/scripts/other/Extract_TMX_Corpus/Extract_TMX_Corpus.py
+++ b/contrib/Extract_TMX_Corpus/Extract_TMX_Corpus.py
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.rsrc.py b/contrib/Extract_TMX_Corpus/Extract_TMX_Corpus.rsrc.py
index 93e19edf2..93e19edf2 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.rsrc.py
+++ b/contrib/Extract_TMX_Corpus/Extract_TMX_Corpus.rsrc.py
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguageCodes.txt b/contrib/Extract_TMX_Corpus/LanguageCodes.txt
index 22ca66c73..22ca66c73 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguageCodes.txt
+++ b/contrib/Extract_TMX_Corpus/LanguageCodes.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguagePairs.txt b/contrib/Extract_TMX_Corpus/LanguagePairs.txt
index d2ffd094e..d2ffd094e 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguagePairs.txt
+++ b/contrib/Extract_TMX_Corpus/LanguagePairs.txt
diff --git a/scripts/other/Extract_TMX_Corpus/_READ_ME_FIRST.txt b/contrib/Extract_TMX_Corpus/_READ_ME_FIRST.txt
index 4709b8240..4709b8240 100644
--- a/scripts/other/Extract_TMX_Corpus/_READ_ME_FIRST.txt
+++ b/contrib/Extract_TMX_Corpus/_READ_ME_FIRST.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/gpl.txt b/contrib/Extract_TMX_Corpus/gpl.txt
index 818433ecc..818433ecc 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/gpl.txt
+++ b/contrib/Extract_TMX_Corpus/gpl.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/LanguageCodes.txt b/contrib/Moses2TMX/LanguageCodes.txt
index 22ca66c73..22ca66c73 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/LanguageCodes.txt
+++ b/contrib/Moses2TMX/LanguageCodes.txt
diff --git a/scripts/other/Moses2TMX/Moses2TMX.py b/contrib/Moses2TMX/Moses2TMX.py
index b032fe9c5..b032fe9c5 100755
--- a/scripts/other/Moses2TMX/Moses2TMX.py
+++ b/contrib/Moses2TMX/Moses2TMX.py
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.rsrc.py b/contrib/Moses2TMX/Moses2TMX.rsrc.py
index dc1570c7f..dc1570c7f 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.rsrc.py
+++ b/contrib/Moses2TMX/Moses2TMX.rsrc.py
diff --git a/scripts/other/Moses2TMX/_READ_ME_FIRST.txt b/contrib/Moses2TMX/_READ_ME_FIRST.txt
index cbc667a31..cbc667a31 100644
--- a/scripts/other/Moses2TMX/_READ_ME_FIRST.txt
+++ b/contrib/Moses2TMX/_READ_ME_FIRST.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/gpl.txt b/contrib/Moses2TMX/gpl.txt
index 818433ecc..818433ecc 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/gpl.txt
+++ b/contrib/Moses2TMX/gpl.txt
diff --git a/moses-cmd/src/checkplf.cpp b/contrib/checkplf/checkplf.cpp
index f8de29e8e..f8de29e8e 100644
--- a/moses-cmd/src/checkplf.cpp
+++ b/contrib/checkplf/checkplf.cpp
diff --git a/scripts/training/eppex/ISS.h b/contrib/eppex/ISS.h
index 7921fcbf8..7921fcbf8 100644
--- a/scripts/training/eppex/ISS.h
+++ b/contrib/eppex/ISS.h
diff --git a/scripts/training/eppex/IndexedPhrasesPair.h b/contrib/eppex/IndexedPhrasesPair.h
index 18e3a39fd..18e3a39fd 100644
--- a/scripts/training/eppex/IndexedPhrasesPair.h
+++ b/contrib/eppex/IndexedPhrasesPair.h
diff --git a/scripts/training/eppex/LossyCounter.h b/contrib/eppex/LossyCounter.h
index 57cce079d..57cce079d 100644
--- a/scripts/training/eppex/LossyCounter.h
+++ b/contrib/eppex/LossyCounter.h
diff --git a/scripts/training/eppex/Makefile.am b/contrib/eppex/Makefile.am
index 970cc5d61..970cc5d61 100644
--- a/scripts/training/eppex/Makefile.am
+++ b/contrib/eppex/Makefile.am
diff --git a/contrib/eppex/Makefile.in b/contrib/eppex/Makefile.in
new file mode 100644
index 000000000..80fca7ce6
--- /dev/null
+++ b/contrib/eppex/Makefile.in
@@ -0,0 +1,762 @@
+# Makefile.in generated by automake 1.11.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation,
+# Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+# eppex - epochal phrase table extraction for Statistical Machine Translation
+# Ceslav Przywara, UFAL MFF UK, Prague, 2011
+# $Id: $
+
+# Process this file with automake to produce Makefile.in
+
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+bin_PROGRAMS = counter$(EXEEXT) eppex$(EXEEXT)
+subdir = .
+DIST_COMMON = $(am__configure_deps) $(srcdir)/Makefile.am \
+ $(srcdir)/Makefile.in $(srcdir)/config.h.in \
+ $(top_srcdir)/configure depcomp install-sh missing
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_boost_base.m4 \
+ $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+ $(ACLOCAL_M4)
+am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
+ configure.lineno config.status.lineno
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+am__installdirs = "$(DESTDIR)$(bindir)"
+PROGRAMS = $(bin_PROGRAMS)
+am_counter_OBJECTS = counter-tables-core.$(OBJEXT) \
+ counter-SentenceAlignment.$(OBJEXT) \
+ counter-phrase-extract.$(OBJEXT) counter-shared.$(OBJEXT) \
+ counter-counter.$(OBJEXT)
+counter_OBJECTS = $(am_counter_OBJECTS)
+counter_LDADD = $(LDADD)
+counter_LINK = $(CXXLD) $(counter_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \
+ $(LDFLAGS) -o $@
+am_eppex_OBJECTS = tables-core.$(OBJEXT) SentenceAlignment.$(OBJEXT) \
+ phrase-extract.$(OBJEXT) shared.$(OBJEXT) eppex.$(OBJEXT)
+eppex_OBJECTS = $(am_eppex_OBJECTS)
+eppex_LDADD = $(LDADD)
+DEFAULT_INCLUDES = -I.@am__isrc@
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__depfiles_maybe = depfiles
+am__mv = mv -f
+CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
+CXXLD = $(CXX)
+CXXLINK = $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) \
+ -o $@
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
+SOURCES = $(counter_SOURCES) $(eppex_SOURCES)
+DIST_SOURCES = $(counter_SOURCES) $(eppex_SOURCES)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+distdir = $(PACKAGE)-$(VERSION)
+top_distdir = $(distdir)
+am__remove_distdir = \
+ { test ! -d "$(distdir)" \
+ || { find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
+ && rm -fr "$(distdir)"; }; }
+DIST_ARCHIVES = $(distdir).tar.gz
+GZIP_ENV = --best
+distuninstallcheck_listfiles = find . -type f -print
+distcleancheck_listfiles = find . -type f -print
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+BOOST_CPPFLAGS = @BOOST_CPPFLAGS@
+BOOST_LDFLAGS = @BOOST_LDFLAGS@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPPFLAGS = @CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+OBJEXT = @OBJEXT@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build_alias = @build_alias@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host_alias = @host_alias@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+ACLOCAL_AMFLAGS = -I m4
+AUTOMAKE_OPTIONS = foreign
+# Note: during development eppex was compiled with -O6, but that flag
+# gets overwritten by the -O2 flag set by automake.
+AM_CXXFLAGS = $(BOOST_CPPFLAGS) -Wall
+
+# Counter shares only some of the functionality of the phrase-extract module.
+counter_CXXFLAGS = -DGET_COUNTS_ONLY
+
+# Uncomment to use std::tr1::unordered_map instead of std::map in the Lossy Counter implementation.
+# This is NOT recommended at the moment (the hashing function needs to be optimized).
+#eppex_CXXFLAGS = -DUSE_UNORDERED_MAP
+counter_SOURCES = ../phrase-extract/tables-core.h ../phrase-extract/SentenceAlignment.h config.h phrase-extract.h shared.h IndexedPhrasesPair.h LossyCounter.h \
+ ../phrase-extract/tables-core.cpp ../phrase-extract/SentenceAlignment.cpp phrase-extract.cpp shared.cpp counter.cpp
+
+eppex_SOURCES = ../phrase-extract/tables-core.h ../phrase-extract/SentenceAlignment.h config.h phrase-extract.h shared.h IndexedPhrasesPair.h LossyCounter.h \
+ ../phrase-extract/tables-core.cpp ../phrase-extract/SentenceAlignment.cpp phrase-extract.cpp shared.cpp eppex.cpp
+
+all: config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-am
+
+.SUFFIXES:
+.SUFFIXES: .cpp .o .obj
+am--refresh:
+ @:
+$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
+ @for dep in $?; do \
+ case '$(am__configure_deps)' in \
+ *$$dep*) \
+ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \
+ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \
+ && exit 0; \
+ exit 1;; \
+ esac; \
+ done; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
+ $(am__cd) $(top_srcdir) && \
+ $(AUTOMAKE) --foreign Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ @case '$?' in \
+ *config.status*) \
+ echo ' $(SHELL) ./config.status'; \
+ $(SHELL) ./config.status;; \
+ *) \
+ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
+ esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+ $(SHELL) ./config.status --recheck
+
+$(top_srcdir)/configure: $(am__configure_deps)
+ $(am__cd) $(srcdir) && $(AUTOCONF)
+$(ACLOCAL_M4): $(am__aclocal_m4_deps)
+ $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
+$(am__aclocal_m4_deps):
+
+config.h: stamp-h1
+ @if test ! -f $@; then \
+ rm -f stamp-h1; \
+ $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \
+ else :; fi
+
+stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status
+ @rm -f stamp-h1
+ cd $(top_builddir) && $(SHELL) ./config.status config.h
+$(srcdir)/config.h.in: $(am__configure_deps)
+ ($(am__cd) $(top_srcdir) && $(AUTOHEADER))
+ rm -f stamp-h1
+ touch $@
+
+distclean-hdr:
+ -rm -f config.h stamp-h1
+install-binPROGRAMS: $(bin_PROGRAMS)
+ @$(NORMAL_INSTALL)
+ test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)"
+ @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
+ for p in $$list; do echo "$$p $$p"; done | \
+ sed 's/$(EXEEXT)$$//' | \
+ while read p p1; do if test -f $$p; \
+ then echo "$$p"; echo "$$p"; else :; fi; \
+ done | \
+ sed -e 'p;s,.*/,,;n;h' -e 's|.*|.|' \
+ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \
+ sed 'N;N;N;s,\n, ,g' | \
+ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \
+ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
+ if ($$2 == $$4) files[d] = files[d] " " $$1; \
+ else { print "f", $$3 "/" $$4, $$1; } } \
+ END { for (d in files) print "f", d, files[d] }' | \
+ while read type dir files; do \
+ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
+ test -z "$$files" || { \
+ echo " $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \
+ $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \
+ } \
+ ; done
+
+uninstall-binPROGRAMS:
+ @$(NORMAL_UNINSTALL)
+ @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
+ files=`for p in $$list; do echo "$$p"; done | \
+ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \
+ -e 's/$$/$(EXEEXT)/' `; \
+ test -n "$$list" || exit 0; \
+ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \
+ cd "$(DESTDIR)$(bindir)" && rm -f $$files
+
+clean-binPROGRAMS:
+ -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS)
+counter$(EXEEXT): $(counter_OBJECTS) $(counter_DEPENDENCIES)
+ @rm -f counter$(EXEEXT)
+ $(counter_LINK) $(counter_OBJECTS) $(counter_LDADD) $(LIBS)
+eppex$(EXEEXT): $(eppex_OBJECTS) $(eppex_DEPENDENCIES)
+ @rm -f eppex$(EXEEXT)
+ $(CXXLINK) $(eppex_OBJECTS) $(eppex_LDADD) $(LIBS)
+
+mostlyclean-compile:
+ -rm -f *.$(OBJEXT)
+
+distclean-compile:
+ -rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/SentenceAlignment.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/counter-SentenceAlignment.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/counter-counter.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/counter-phrase-extract.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/counter-shared.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/counter-tables-core.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/eppex.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/phrase-extract.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/shared.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tables-core.Po@am__quote@
+
+.cpp.o:
+@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $<
+
+.cpp.obj:
+@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
+
+counter-tables-core.o: ../phrase-extract/tables-core.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-tables-core.o -MD -MP -MF $(DEPDIR)/counter-tables-core.Tpo -c -o counter-tables-core.o `test -f '../phrase-extract/tables-core.cpp' || echo '$(srcdir)/'`../phrase-extract/tables-core.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-tables-core.Tpo $(DEPDIR)/counter-tables-core.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/tables-core.cpp' object='counter-tables-core.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-tables-core.o `test -f '../phrase-extract/tables-core.cpp' || echo '$(srcdir)/'`../phrase-extract/tables-core.cpp
+
+counter-tables-core.obj: ../phrase-extract/tables-core.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-tables-core.obj -MD -MP -MF $(DEPDIR)/counter-tables-core.Tpo -c -o counter-tables-core.obj `if test -f '../phrase-extract/tables-core.cpp'; then $(CYGPATH_W) '../phrase-extract/tables-core.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/tables-core.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-tables-core.Tpo $(DEPDIR)/counter-tables-core.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/tables-core.cpp' object='counter-tables-core.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-tables-core.obj `if test -f '../phrase-extract/tables-core.cpp'; then $(CYGPATH_W) '../phrase-extract/tables-core.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/tables-core.cpp'; fi`
+
+counter-SentenceAlignment.o: ../phrase-extract/SentenceAlignment.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-SentenceAlignment.o -MD -MP -MF $(DEPDIR)/counter-SentenceAlignment.Tpo -c -o counter-SentenceAlignment.o `test -f '../phrase-extract/SentenceAlignment.cpp' || echo '$(srcdir)/'`../phrase-extract/SentenceAlignment.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-SentenceAlignment.Tpo $(DEPDIR)/counter-SentenceAlignment.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/SentenceAlignment.cpp' object='counter-SentenceAlignment.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-SentenceAlignment.o `test -f '../phrase-extract/SentenceAlignment.cpp' || echo '$(srcdir)/'`../phrase-extract/SentenceAlignment.cpp
+
+counter-SentenceAlignment.obj: ../phrase-extract/SentenceAlignment.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-SentenceAlignment.obj -MD -MP -MF $(DEPDIR)/counter-SentenceAlignment.Tpo -c -o counter-SentenceAlignment.obj `if test -f '../phrase-extract/SentenceAlignment.cpp'; then $(CYGPATH_W) '../phrase-extract/SentenceAlignment.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/SentenceAlignment.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-SentenceAlignment.Tpo $(DEPDIR)/counter-SentenceAlignment.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/SentenceAlignment.cpp' object='counter-SentenceAlignment.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-SentenceAlignment.obj `if test -f '../phrase-extract/SentenceAlignment.cpp'; then $(CYGPATH_W) '../phrase-extract/SentenceAlignment.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/SentenceAlignment.cpp'; fi`
+
+counter-phrase-extract.o: phrase-extract.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-phrase-extract.o -MD -MP -MF $(DEPDIR)/counter-phrase-extract.Tpo -c -o counter-phrase-extract.o `test -f 'phrase-extract.cpp' || echo '$(srcdir)/'`phrase-extract.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-phrase-extract.Tpo $(DEPDIR)/counter-phrase-extract.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='phrase-extract.cpp' object='counter-phrase-extract.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-phrase-extract.o `test -f 'phrase-extract.cpp' || echo '$(srcdir)/'`phrase-extract.cpp
+
+counter-phrase-extract.obj: phrase-extract.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-phrase-extract.obj -MD -MP -MF $(DEPDIR)/counter-phrase-extract.Tpo -c -o counter-phrase-extract.obj `if test -f 'phrase-extract.cpp'; then $(CYGPATH_W) 'phrase-extract.cpp'; else $(CYGPATH_W) '$(srcdir)/phrase-extract.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-phrase-extract.Tpo $(DEPDIR)/counter-phrase-extract.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='phrase-extract.cpp' object='counter-phrase-extract.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-phrase-extract.obj `if test -f 'phrase-extract.cpp'; then $(CYGPATH_W) 'phrase-extract.cpp'; else $(CYGPATH_W) '$(srcdir)/phrase-extract.cpp'; fi`
+
+counter-shared.o: shared.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-shared.o -MD -MP -MF $(DEPDIR)/counter-shared.Tpo -c -o counter-shared.o `test -f 'shared.cpp' || echo '$(srcdir)/'`shared.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-shared.Tpo $(DEPDIR)/counter-shared.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='shared.cpp' object='counter-shared.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-shared.o `test -f 'shared.cpp' || echo '$(srcdir)/'`shared.cpp
+
+counter-shared.obj: shared.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-shared.obj -MD -MP -MF $(DEPDIR)/counter-shared.Tpo -c -o counter-shared.obj `if test -f 'shared.cpp'; then $(CYGPATH_W) 'shared.cpp'; else $(CYGPATH_W) '$(srcdir)/shared.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-shared.Tpo $(DEPDIR)/counter-shared.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='shared.cpp' object='counter-shared.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-shared.obj `if test -f 'shared.cpp'; then $(CYGPATH_W) 'shared.cpp'; else $(CYGPATH_W) '$(srcdir)/shared.cpp'; fi`
+
+counter-counter.o: counter.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-counter.o -MD -MP -MF $(DEPDIR)/counter-counter.Tpo -c -o counter-counter.o `test -f 'counter.cpp' || echo '$(srcdir)/'`counter.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-counter.Tpo $(DEPDIR)/counter-counter.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='counter.cpp' object='counter-counter.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-counter.o `test -f 'counter.cpp' || echo '$(srcdir)/'`counter.cpp
+
+counter-counter.obj: counter.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -MT counter-counter.obj -MD -MP -MF $(DEPDIR)/counter-counter.Tpo -c -o counter-counter.obj `if test -f 'counter.cpp'; then $(CYGPATH_W) 'counter.cpp'; else $(CYGPATH_W) '$(srcdir)/counter.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/counter-counter.Tpo $(DEPDIR)/counter-counter.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='counter.cpp' object='counter-counter.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(counter_CXXFLAGS) $(CXXFLAGS) -c -o counter-counter.obj `if test -f 'counter.cpp'; then $(CYGPATH_W) 'counter.cpp'; else $(CYGPATH_W) '$(srcdir)/counter.cpp'; fi`
+
+tables-core.o: ../phrase-extract/tables-core.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tables-core.o -MD -MP -MF $(DEPDIR)/tables-core.Tpo -c -o tables-core.o `test -f '../phrase-extract/tables-core.cpp' || echo '$(srcdir)/'`../phrase-extract/tables-core.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/tables-core.Tpo $(DEPDIR)/tables-core.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/tables-core.cpp' object='tables-core.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tables-core.o `test -f '../phrase-extract/tables-core.cpp' || echo '$(srcdir)/'`../phrase-extract/tables-core.cpp
+
+tables-core.obj: ../phrase-extract/tables-core.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT tables-core.obj -MD -MP -MF $(DEPDIR)/tables-core.Tpo -c -o tables-core.obj `if test -f '../phrase-extract/tables-core.cpp'; then $(CYGPATH_W) '../phrase-extract/tables-core.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/tables-core.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/tables-core.Tpo $(DEPDIR)/tables-core.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/tables-core.cpp' object='tables-core.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o tables-core.obj `if test -f '../phrase-extract/tables-core.cpp'; then $(CYGPATH_W) '../phrase-extract/tables-core.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/tables-core.cpp'; fi`
+
+SentenceAlignment.o: ../phrase-extract/SentenceAlignment.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT SentenceAlignment.o -MD -MP -MF $(DEPDIR)/SentenceAlignment.Tpo -c -o SentenceAlignment.o `test -f '../phrase-extract/SentenceAlignment.cpp' || echo '$(srcdir)/'`../phrase-extract/SentenceAlignment.cpp
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/SentenceAlignment.Tpo $(DEPDIR)/SentenceAlignment.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/SentenceAlignment.cpp' object='SentenceAlignment.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o SentenceAlignment.o `test -f '../phrase-extract/SentenceAlignment.cpp' || echo '$(srcdir)/'`../phrase-extract/SentenceAlignment.cpp
+
+SentenceAlignment.obj: ../phrase-extract/SentenceAlignment.cpp
+@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT SentenceAlignment.obj -MD -MP -MF $(DEPDIR)/SentenceAlignment.Tpo -c -o SentenceAlignment.obj `if test -f '../phrase-extract/SentenceAlignment.cpp'; then $(CYGPATH_W) '../phrase-extract/SentenceAlignment.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/SentenceAlignment.cpp'; fi`
+@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/SentenceAlignment.Tpo $(DEPDIR)/SentenceAlignment.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../phrase-extract/SentenceAlignment.cpp' object='SentenceAlignment.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o SentenceAlignment.obj `if test -f '../phrase-extract/SentenceAlignment.cpp'; then $(CYGPATH_W) '../phrase-extract/SentenceAlignment.cpp'; else $(CYGPATH_W) '$(srcdir)/../phrase-extract/SentenceAlignment.cpp'; fi`
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in files) print i; }; }'`; \
+ mkid -fID $$unique
+tags: TAGS
+
+TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ set x; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in files) print i; }; }'`; \
+ shift; \
+ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+ test -n "$$unique" || unique=$$empty_fix; \
+ if test $$# -gt 0; then \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ "$$@" $$unique; \
+ else \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ $$unique; \
+ fi; \
+ fi
+ctags: CTAGS
+CTAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in files) print i; }; }'`; \
+ test -z "$(CTAGS_ARGS)$$unique" \
+ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+ $$unique
+
+GTAGS:
+ here=`$(am__cd) $(top_builddir) && pwd` \
+ && $(am__cd) $(top_srcdir) \
+ && gtags -i $(GTAGS_ARGS) "$$here"
+
+distclean-tags:
+ -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+ $(am__remove_distdir)
+ test -d "$(distdir)" || mkdir "$(distdir)"
+ @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ list='$(DISTFILES)'; \
+ dist_files=`for file in $$list; do echo $$file; done | \
+ sed -e "s|^$$srcdirstrip/||;t" \
+ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+ case $$dist_files in \
+ */*) $(MKDIR_P) `echo "$$dist_files" | \
+ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+ sort -u` ;; \
+ esac; \
+ for file in $$dist_files; do \
+ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+ if test -d $$d/$$file; then \
+ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+ if test -d "$(distdir)/$$file"; then \
+ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+ fi; \
+ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+ fi; \
+ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+ else \
+ test -f "$(distdir)/$$file" \
+ || cp -p $$d/$$file "$(distdir)/$$file" \
+ || exit 1; \
+ fi; \
+ done
+ -test -n "$(am__skip_mode_fix)" \
+ || find "$(distdir)" -type d ! -perm -755 \
+ -exec chmod u+rwx,go+rx {} \; -o \
+ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
+ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \
+ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
+ || chmod -R a+r "$(distdir)"
+dist-gzip: distdir
+ tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+ $(am__remove_distdir)
+
+dist-bzip2: distdir
+ tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2
+ $(am__remove_distdir)
+
+dist-lzma: distdir
+ tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma
+ $(am__remove_distdir)
+
+dist-xz: distdir
+ tardir=$(distdir) && $(am__tar) | xz -c >$(distdir).tar.xz
+ $(am__remove_distdir)
+
+dist-tarZ: distdir
+ tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
+ $(am__remove_distdir)
+
+dist-shar: distdir
+ shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
+ $(am__remove_distdir)
+
+dist-zip: distdir
+ -rm -f $(distdir).zip
+ zip -rq $(distdir).zip $(distdir)
+ $(am__remove_distdir)
+
+dist dist-all: distdir
+ tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+ $(am__remove_distdir)
+
+# This target untars the dist file and tries a VPATH configuration. Then
+# it guarantees that the distribution is self-contained by making another
+# tarfile.
+distcheck: dist
+ case '$(DIST_ARCHIVES)' in \
+ *.tar.gz*) \
+ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\
+ *.tar.bz2*) \
+ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
+ *.tar.lzma*) \
+ lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\
+ *.tar.xz*) \
+ xz -dc $(distdir).tar.xz | $(am__untar) ;;\
+ *.tar.Z*) \
+ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
+ *.shar.gz*) \
+ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\
+ *.zip*) \
+ unzip $(distdir).zip ;;\
+ esac
+ chmod -R a-w $(distdir); chmod a+w $(distdir)
+ mkdir $(distdir)/_build
+ mkdir $(distdir)/_inst
+ chmod a-w $(distdir)
+ test -d $(distdir)/_build || exit 0; \
+ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
+ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+ && am__cwd=`pwd` \
+ && $(am__cd) $(distdir)/_build \
+ && ../configure --srcdir=.. --prefix="$$dc_install_base" \
+ $(DISTCHECK_CONFIGURE_FLAGS) \
+ && $(MAKE) $(AM_MAKEFLAGS) \
+ && $(MAKE) $(AM_MAKEFLAGS) dvi \
+ && $(MAKE) $(AM_MAKEFLAGS) check \
+ && $(MAKE) $(AM_MAKEFLAGS) install \
+ && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+ && $(MAKE) $(AM_MAKEFLAGS) uninstall \
+ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
+ distuninstallcheck \
+ && chmod -R a-w "$$dc_install_base" \
+ && ({ \
+ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \
+ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
+ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
+ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
+ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
+ } || { rm -rf "$$dc_destdir"; exit 1; }) \
+ && rm -rf "$$dc_destdir" \
+ && $(MAKE) $(AM_MAKEFLAGS) dist \
+ && rm -rf $(DIST_ARCHIVES) \
+ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
+ && cd "$$am__cwd" \
+ || exit 1
+ $(am__remove_distdir)
+ @(echo "$(distdir) archives ready for distribution: "; \
+ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
+ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
+distuninstallcheck:
+ @$(am__cd) '$(distuninstallcheck_dir)' \
+ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \
+ || { echo "ERROR: files left after uninstall:" ; \
+ if test -n "$(DESTDIR)"; then \
+ echo " (check DESTDIR support)"; \
+ fi ; \
+ $(distuninstallcheck_listfiles) ; \
+ exit 1; } >&2
+distcleancheck: distclean
+ @if test '$(srcdir)' = . ; then \
+ echo "ERROR: distcleancheck can only run from a VPATH build" ; \
+ exit 1 ; \
+ fi
+ @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
+ || { echo "ERROR: files left in build directory after distclean:" ; \
+ $(distcleancheck_listfiles) ; \
+ exit 1; } >&2
+check-am: all-am
+check: check-am
+all-am: Makefile $(PROGRAMS) config.h
+installdirs:
+ for dir in "$(DESTDIR)$(bindir)"; do \
+ test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+ done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ `test -z '$(STRIP)' || \
+ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+ -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+ -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+ @echo "This command is intended for maintainers to use"
+ @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-binPROGRAMS clean-generic mostlyclean-am
+
+distclean: distclean-am
+ -rm -f $(am__CONFIG_DISTCLEAN_FILES)
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+ distclean-hdr distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am: install-binPROGRAMS
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+ -rm -f $(am__CONFIG_DISTCLEAN_FILES)
+ -rm -rf $(top_srcdir)/autom4te.cache
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-binPROGRAMS
+
+.MAKE: all install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am am--refresh check check-am clean \
+ clean-binPROGRAMS clean-generic ctags dist dist-all dist-bzip2 \
+ dist-gzip dist-lzma dist-shar dist-tarZ dist-xz dist-zip \
+ distcheck distclean distclean-compile distclean-generic \
+ distclean-hdr distclean-tags distcleancheck distdir \
+ distuninstallcheck dvi dvi-am html html-am info info-am \
+ install install-am install-binPROGRAMS install-data \
+ install-data-am install-dvi install-dvi-am install-exec \
+ install-exec-am install-html install-html-am install-info \
+ install-info-am install-man install-pdf install-pdf-am \
+ install-ps install-ps-am install-strip installcheck \
+ installcheck-am installdirs maintainer-clean \
+ maintainer-clean-generic mostlyclean mostlyclean-compile \
+ mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \
+ uninstall-am uninstall-binPROGRAMS
+
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/scripts/training/eppex/SafeGetline.h b/contrib/eppex/SafeGetline.h
index 6030359dc..6030359dc 100644
--- a/scripts/training/eppex/SafeGetline.h
+++ b/contrib/eppex/SafeGetline.h
diff --git a/scripts/training/eppex/aclocal.m4 b/contrib/eppex/aclocal.m4
index c0ce22463..c0ce22463 100644
--- a/scripts/training/eppex/aclocal.m4
+++ b/contrib/eppex/aclocal.m4
diff --git a/scripts/training/eppex/config.h.in b/contrib/eppex/config.h.in
index 51e717c8b..51e717c8b 100644
--- a/scripts/training/eppex/config.h.in
+++ b/contrib/eppex/config.h.in
diff --git a/scripts/training/eppex/configure b/contrib/eppex/configure
index 6aab92d7b..6aab92d7b 100755
--- a/scripts/training/eppex/configure
+++ b/contrib/eppex/configure
diff --git a/scripts/training/eppex/configure.ac b/contrib/eppex/configure.ac
index 6c64b77af..6c64b77af 100644
--- a/scripts/training/eppex/configure.ac
+++ b/contrib/eppex/configure.ac
diff --git a/scripts/training/eppex/counter.cpp b/contrib/eppex/counter.cpp
index 403e18a08..403e18a08 100644
--- a/scripts/training/eppex/counter.cpp
+++ b/contrib/eppex/counter.cpp
diff --git a/scripts/training/eppex/depcomp b/contrib/eppex/depcomp
index 04701da53..04701da53 100755
--- a/scripts/training/eppex/depcomp
+++ b/contrib/eppex/depcomp
diff --git a/scripts/training/eppex/eppex.cpp b/contrib/eppex/eppex.cpp
index d382890d2..d382890d2 100644
--- a/scripts/training/eppex/eppex.cpp
+++ b/contrib/eppex/eppex.cpp
diff --git a/scripts/training/eppex/install-sh b/contrib/eppex/install-sh
index 4d4a9519e..4d4a9519e 100755
--- a/scripts/training/eppex/install-sh
+++ b/contrib/eppex/install-sh
diff --git a/scripts/training/eppex/m4/ax_boost_base.m4 b/contrib/eppex/m4/ax_boost_base.m4
index 2e5afd091..2e5afd091 100644
--- a/scripts/training/eppex/m4/ax_boost_base.m4
+++ b/contrib/eppex/m4/ax_boost_base.m4
diff --git a/scripts/training/eppex/missing b/contrib/eppex/missing
index 894e786e1..894e786e1 100755
--- a/scripts/training/eppex/missing
+++ b/contrib/eppex/missing
diff --git a/scripts/training/eppex/phrase-extract.cpp b/contrib/eppex/phrase-extract.cpp
index 5dff43b78..5dff43b78 100644
--- a/scripts/training/eppex/phrase-extract.cpp
+++ b/contrib/eppex/phrase-extract.cpp
diff --git a/scripts/training/eppex/phrase-extract.h b/contrib/eppex/phrase-extract.h
index c01dd3ee2..c01dd3ee2 100644
--- a/scripts/training/eppex/phrase-extract.h
+++ b/contrib/eppex/phrase-extract.h
diff --git a/scripts/training/eppex/shared.cpp b/contrib/eppex/shared.cpp
index 670df1c0f..670df1c0f 100644
--- a/scripts/training/eppex/shared.cpp
+++ b/contrib/eppex/shared.cpp
diff --git a/scripts/training/eppex/shared.h b/contrib/eppex/shared.h
index 2ed70d336..2ed70d336 100644
--- a/scripts/training/eppex/shared.h
+++ b/contrib/eppex/shared.h
diff --git a/scripts/training/eppex/typedefs.h b/contrib/eppex/typedefs.h
index 0665c4aa4..0665c4aa4 100644
--- a/scripts/training/eppex/typedefs.h
+++ b/contrib/eppex/typedefs.h
diff --git a/lmserver/AUTHORS b/contrib/lmserver/AUTHORS
index 4d9dfd0cd..4d9dfd0cd 100644
--- a/lmserver/AUTHORS
+++ b/contrib/lmserver/AUTHORS
diff --git a/lmserver/BUILD b/contrib/lmserver/BUILD
index 2f4d9ace4..2f4d9ace4 100755
--- a/lmserver/BUILD
+++ b/contrib/lmserver/BUILD
diff --git a/lmserver/COPYING b/contrib/lmserver/COPYING
index cdfc50626..cdfc50626 100644
--- a/lmserver/COPYING
+++ b/contrib/lmserver/COPYING
diff --git a/lmserver/ChangeLog b/contrib/lmserver/ChangeLog
index 4083f59b8..4083f59b8 100644
--- a/lmserver/ChangeLog
+++ b/contrib/lmserver/ChangeLog
diff --git a/lmserver/INSTALL b/contrib/lmserver/INSTALL
index 81fa6ffa4..81fa6ffa4 120000
--- a/lmserver/INSTALL
+++ b/contrib/lmserver/INSTALL
diff --git a/lmserver/Makefile.am b/contrib/lmserver/Makefile.am
index ec06d0da2..ec06d0da2 100644
--- a/lmserver/Makefile.am
+++ b/contrib/lmserver/Makefile.am
diff --git a/lmserver/Makefile.in b/contrib/lmserver/Makefile.in
index 3d62eb5fd..3d62eb5fd 100644
--- a/lmserver/Makefile.in
+++ b/contrib/lmserver/Makefile.in
diff --git a/lmserver/NEWS b/contrib/lmserver/NEWS
index e69de29bb..e69de29bb 100644
--- a/lmserver/NEWS
+++ b/contrib/lmserver/NEWS
diff --git a/lmserver/README b/contrib/lmserver/README
index dd9dde134..dd9dde134 100644
--- a/lmserver/README
+++ b/contrib/lmserver/README
diff --git a/lmserver/aclocal.m4 b/contrib/lmserver/aclocal.m4
index 01ee4a076..01ee4a076 100644
--- a/lmserver/aclocal.m4
+++ b/contrib/lmserver/aclocal.m4
diff --git a/lmserver/compile b/contrib/lmserver/compile
index 1b1d23216..1b1d23216 100755
--- a/lmserver/compile
+++ b/contrib/lmserver/compile
diff --git a/lmserver/config.guess b/contrib/lmserver/config.guess
index 2313a174e..2313a174e 100755
--- a/lmserver/config.guess
+++ b/contrib/lmserver/config.guess
diff --git a/lmserver/config.h.in b/contrib/lmserver/config.h.in
index afdac98b6..afdac98b6 100644
--- a/lmserver/config.h.in
+++ b/contrib/lmserver/config.h.in
diff --git a/lmserver/config.status b/contrib/lmserver/config.status
index 490bcaf91..490bcaf91 100755
--- a/lmserver/config.status
+++ b/contrib/lmserver/config.status
diff --git a/lmserver/config.sub b/contrib/lmserver/config.sub
index ba16ebf55..ba16ebf55 100755
--- a/lmserver/config.sub
+++ b/contrib/lmserver/config.sub
diff --git a/lmserver/configure b/contrib/lmserver/configure
index 69a1a6f02..69a1a6f02 100755
--- a/lmserver/configure
+++ b/contrib/lmserver/configure
diff --git a/lmserver/configure.ac b/contrib/lmserver/configure.ac
index 62ab5dc02..62ab5dc02 100644
--- a/lmserver/configure.ac
+++ b/contrib/lmserver/configure.ac
diff --git a/lmserver/daemon.c b/contrib/lmserver/daemon.c
index 9cb7884d0..9cb7884d0 100644
--- a/lmserver/daemon.c
+++ b/contrib/lmserver/daemon.c
diff --git a/lmserver/depcomp b/contrib/lmserver/depcomp
index e5f9736c7..e5f9736c7 100755
--- a/lmserver/depcomp
+++ b/contrib/lmserver/depcomp
diff --git a/lmserver/examples/LMClient.java b/contrib/lmserver/examples/LMClient.java
index ee1a42e28..ee1a42e28 100644
--- a/lmserver/examples/LMClient.java
+++ b/contrib/lmserver/examples/LMClient.java
diff --git a/lmserver/examples/LMClient.pm b/contrib/lmserver/examples/LMClient.pm
index 78f1e03ab..78f1e03ab 100644
--- a/lmserver/examples/LMClient.pm
+++ b/contrib/lmserver/examples/LMClient.pm
diff --git a/lmserver/examples/lmclient.cc b/contrib/lmserver/examples/lmclient.cc
index 4ce5be380..4ce5be380 100644
--- a/lmserver/examples/lmclient.cc
+++ b/contrib/lmserver/examples/lmclient.cc
diff --git a/lmserver/examples/query_lmserver.pl b/contrib/lmserver/examples/query_lmserver.pl
index 3573dcf5c..3573dcf5c 100755
--- a/lmserver/examples/query_lmserver.pl
+++ b/contrib/lmserver/examples/query_lmserver.pl
diff --git a/lmserver/install-sh b/contrib/lmserver/install-sh
index a5897de6e..a5897de6e 100755
--- a/lmserver/install-sh
+++ b/contrib/lmserver/install-sh
diff --git a/lmserver/lmserver.c b/contrib/lmserver/lmserver.c
index d3aa685cc..d3aa685cc 100644
--- a/lmserver/lmserver.c
+++ b/contrib/lmserver/lmserver.c
diff --git a/lmserver/lmserver.h b/contrib/lmserver/lmserver.h
index 1afdd133b..1afdd133b 100644
--- a/lmserver/lmserver.h
+++ b/contrib/lmserver/lmserver.h
diff --git a/lmserver/missing b/contrib/lmserver/missing
index 1c8ff7049..1c8ff7049 100755
--- a/lmserver/missing
+++ b/contrib/lmserver/missing
diff --git a/lmserver/srilm.cc b/contrib/lmserver/srilm.cc
index 657bed3c4..657bed3c4 100644
--- a/lmserver/srilm.cc
+++ b/contrib/lmserver/srilm.cc
diff --git a/lmserver/srilm.h b/contrib/lmserver/srilm.h
index d9b00ef92..d9b00ef92 100644
--- a/lmserver/srilm.h
+++ b/contrib/lmserver/srilm.h
diff --git a/lmserver/stamp-h1 b/contrib/lmserver/stamp-h1
index 4547fe1b5..4547fe1b5 100644
--- a/lmserver/stamp-h1
+++ b/contrib/lmserver/stamp-h1
diff --git a/lmserver/stats.h b/contrib/lmserver/stats.h
index 895dfcd10..895dfcd10 100644
--- a/lmserver/stats.h
+++ b/contrib/lmserver/stats.h
diff --git a/lmserver/thread.c b/contrib/lmserver/thread.c
index 7fc9d0463..7fc9d0463 100644
--- a/lmserver/thread.c
+++ b/contrib/lmserver/thread.c
diff --git a/scripts/training/memscore/Makefile.am b/contrib/memscore/Makefile.am
index 1cf79f2b3..1cf79f2b3 100644
--- a/scripts/training/memscore/Makefile.am
+++ b/contrib/memscore/Makefile.am
diff --git a/contrib/memscore/Makefile.in b/contrib/memscore/Makefile.in
new file mode 100644
index 000000000..fa1f6e9f5
--- /dev/null
+++ b/contrib/memscore/Makefile.in
@@ -0,0 +1,581 @@
+# Makefile.in generated by automake 1.9.6 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005 Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+# memscore - in-memory phrase scoring for Statistical Machine Translation
+# Christian Hardmeier, FBK-irst, Trento, 2010
+# $Id$
+
+srcdir = @srcdir@
+top_srcdir = @top_srcdir@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+top_builddir = .
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+INSTALL = @INSTALL@
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+bin_PROGRAMS = memscore$(EXEEXT)
+@IRSTLM_TRUE@am__append_1 = phraselm.cpp phraselm.h
+@CHANNEL_SCORER_TRUE@am__append_2 = channel-scorer.cpp channel-scorer.h
+subdir = .
+DIST_COMMON = $(am__configure_deps) $(srcdir)/Makefile.am \
+ $(srcdir)/Makefile.in $(srcdir)/config.h.in \
+ $(top_srcdir)/configure depcomp install-sh missing
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_boost_base.m4 \
+ $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+ $(ACLOCAL_M4)
+am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
+ configure.lineno configure.status.lineno
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = config.h
+CONFIG_CLEAN_FILES =
+am__installdirs = "$(DESTDIR)$(bindir)"
+binPROGRAMS_INSTALL = $(INSTALL_PROGRAM)
+PROGRAMS = $(bin_PROGRAMS)
+am__memscore_SOURCES_DIST = datastorage.h memscore.h phrasetable.h \
+ scorer.h scorer-impl.h statistic.h timestamp.h phrasetable.cpp \
+ memscore.cpp scorer.cpp lexdecom.cpp lexdecom.h phraselm.cpp \
+ phraselm.h channel-scorer.cpp channel-scorer.h
+@IRSTLM_TRUE@am__objects_1 = phraselm.$(OBJEXT)
+@CHANNEL_SCORER_TRUE@am__objects_2 = channel-scorer.$(OBJEXT)
+am_memscore_OBJECTS = phrasetable.$(OBJEXT) memscore.$(OBJEXT) \
+ scorer.$(OBJEXT) lexdecom.$(OBJEXT) $(am__objects_1) \
+ $(am__objects_2)
+memscore_OBJECTS = $(am_memscore_OBJECTS)
+memscore_DEPENDENCIES =
+DEFAULT_INCLUDES = -I. -I$(srcdir) -I.
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__depfiles_maybe = depfiles
+CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
+CXXLD = $(CXX)
+CXXLINK = $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) \
+ -o $@
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
+SOURCES = $(memscore_SOURCES)
+DIST_SOURCES = $(am__memscore_SOURCES_DIST)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+distdir = $(PACKAGE)-$(VERSION)
+top_distdir = $(distdir)
+am__remove_distdir = \
+ { test ! -d $(distdir) \
+ || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \
+ && rm -fr $(distdir); }; }
+DIST_ARCHIVES = $(distdir).tar.gz
+GZIP_ENV = --best
+distuninstallcheck_listfiles = find . -type f -print
+distcleancheck_listfiles = find . -type f -print
+ACLOCAL = @ACLOCAL@
+AMDEP_FALSE = @AMDEP_FALSE@
+AMDEP_TRUE = @AMDEP_TRUE@
+AMTAR = @AMTAR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+BOOST_CPPFLAGS = @BOOST_CPPFLAGS@
+BOOST_LDFLAGS = @BOOST_LDFLAGS@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CHANNEL_SCORER_FALSE = @CHANNEL_SCORER_FALSE@
+CHANNEL_SCORER_TRUE = @CHANNEL_SCORER_TRUE@
+CPPFLAGS = @CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+GREP = @GREP@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+IRSTLM_FALSE = @IRSTLM_FALSE@
+IRSTLM_TRUE = @IRSTLM_TRUE@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+OBJEXT = @OBJEXT@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+am__fastdepCC_FALSE = @am__fastdepCC_FALSE@
+am__fastdepCC_TRUE = @am__fastdepCC_TRUE@
+am__fastdepCXX_FALSE = @am__fastdepCXX_FALSE@
+am__fastdepCXX_TRUE = @am__fastdepCXX_TRUE@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build_alias = @build_alias@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host_alias = @host_alias@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+ACLOCAL_AMFLAGS = -I m4
+AUTOMAKE_OPTIONS = foreign
+AM_CXXFLAGS = $(BOOST_CPPFLAGS) -Wall -ffast-math -ftrapping-math -fomit-frame-pointer
+memscore_SOURCES = datastorage.h memscore.h phrasetable.h scorer.h \
+ scorer-impl.h statistic.h timestamp.h phrasetable.cpp \
+ memscore.cpp scorer.cpp lexdecom.cpp lexdecom.h \
+ $(am__append_1) $(am__append_2)
+memscore_LDADD = $(IRSTLM_LIBS) $(GSL_LIBS)
+all: config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-am
+
+.SUFFIXES:
+.SUFFIXES: .cpp .o .obj
+am--refresh:
+ @:
+$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
+ @for dep in $?; do \
+ case '$(am__configure_deps)' in \
+ *$$dep*) \
+ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \
+ cd $(srcdir) && $(AUTOMAKE) --foreign \
+ && exit 0; \
+ exit 1;; \
+ esac; \
+ done; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
+ cd $(top_srcdir) && \
+ $(AUTOMAKE) --foreign Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ @case '$?' in \
+ *config.status*) \
+ echo ' $(SHELL) ./config.status'; \
+ $(SHELL) ./config.status;; \
+ *) \
+ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
+ esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+ $(SHELL) ./config.status --recheck
+
+$(top_srcdir)/configure: $(am__configure_deps)
+ cd $(srcdir) && $(AUTOCONF)
+$(ACLOCAL_M4): $(am__aclocal_m4_deps)
+ cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
+
+config.h: stamp-h1
+ @if test ! -f $@; then \
+ rm -f stamp-h1; \
+ $(MAKE) stamp-h1; \
+ else :; fi
+
+stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status
+ @rm -f stamp-h1
+ cd $(top_builddir) && $(SHELL) ./config.status config.h
+$(srcdir)/config.h.in: $(am__configure_deps)
+ cd $(top_srcdir) && $(AUTOHEADER)
+ rm -f stamp-h1
+ touch $@
+
+distclean-hdr:
+ -rm -f config.h stamp-h1
+install-binPROGRAMS: $(bin_PROGRAMS)
+ @$(NORMAL_INSTALL)
+ test -z "$(bindir)" || $(mkdir_p) "$(DESTDIR)$(bindir)"
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ p1=`echo $$p|sed 's/$(EXEEXT)$$//'`; \
+ if test -f $$p \
+ ; then \
+ f=`echo "$$p1" | sed 's,^.*/,,;$(transform);s/$$/$(EXEEXT)/'`; \
+ echo " $(INSTALL_PROGRAM_ENV) $(binPROGRAMS_INSTALL) '$$p' '$(DESTDIR)$(bindir)/$$f'"; \
+ $(INSTALL_PROGRAM_ENV) $(binPROGRAMS_INSTALL) "$$p" "$(DESTDIR)$(bindir)/$$f" || exit 1; \
+ else :; fi; \
+ done
+
+uninstall-binPROGRAMS:
+ @$(NORMAL_UNINSTALL)
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ f=`echo "$$p" | sed 's,^.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/'`; \
+ echo " rm -f '$(DESTDIR)$(bindir)/$$f'"; \
+ rm -f "$(DESTDIR)$(bindir)/$$f"; \
+ done
+
+clean-binPROGRAMS:
+ -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS)
+memscore$(EXEEXT): $(memscore_OBJECTS) $(memscore_DEPENDENCIES)
+ @rm -f memscore$(EXEEXT)
+ $(CXXLINK) $(memscore_LDFLAGS) $(memscore_OBJECTS) $(memscore_LDADD) $(LIBS)
+
+mostlyclean-compile:
+ -rm -f *.$(OBJEXT)
+
+distclean-compile:
+ -rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/channel-scorer.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/lexdecom.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/memscore.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/phraselm.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/phrasetable.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/scorer.Po@am__quote@
+
+.cpp.o:
+@am__fastdepCXX_TRUE@ if $(CXXCOMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $<
+
+.cpp.obj:
+@am__fastdepCXX_TRUE@ if $(CXXCOMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \
+@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
+uninstall-info-am:
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ mkid -fID $$unique
+tags: TAGS
+
+TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ tags=; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+ test -n "$$unique" || unique=$$empty_fix; \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ $$tags $$unique; \
+ fi
+ctags: CTAGS
+CTAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ tags=; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ test -z "$(CTAGS_ARGS)$$tags$$unique" \
+ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+ $$tags $$unique
+
+GTAGS:
+ here=`$(am__cd) $(top_builddir) && pwd` \
+ && cd $(top_srcdir) \
+ && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+ -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+ $(am__remove_distdir)
+ mkdir $(distdir)
+ $(mkdir_p) $(distdir)/m4
+ @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \
+ list='$(DISTFILES)'; for file in $$list; do \
+ case $$file in \
+ $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
+ $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \
+ esac; \
+ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+ dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \
+ if test "$$dir" != "$$file" && test "$$dir" != "."; then \
+ dir="/$$dir"; \
+ $(mkdir_p) "$(distdir)$$dir"; \
+ else \
+ dir=''; \
+ fi; \
+ if test -d $$d/$$file; then \
+ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+ fi; \
+ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+ else \
+ test -f $(distdir)/$$file \
+ || cp -p $$d/$$file $(distdir)/$$file \
+ || exit 1; \
+ fi; \
+ done
+ -find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \
+ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
+ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \
+ ! -type d ! -perm -444 -exec $(SHELL) $(install_sh) -c -m a+r {} {} \; \
+ || chmod -R a+r $(distdir)
+dist-gzip: distdir
+ tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+ $(am__remove_distdir)
+
+dist-bzip2: distdir
+ tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2
+ $(am__remove_distdir)
+
+dist-tarZ: distdir
+ tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
+ $(am__remove_distdir)
+
+dist-shar: distdir
+ shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
+ $(am__remove_distdir)
+
+dist-zip: distdir
+ -rm -f $(distdir).zip
+ zip -rq $(distdir).zip $(distdir)
+ $(am__remove_distdir)
+
+dist dist-all: distdir
+ tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+ $(am__remove_distdir)
+
+# This target untars the dist file and tries a VPATH configuration. Then
+# it guarantees that the distribution is self-contained by making another
+# tarfile.
+distcheck: dist
+ case '$(DIST_ARCHIVES)' in \
+ *.tar.gz*) \
+ GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\
+ *.tar.bz2*) \
+ bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\
+ *.tar.Z*) \
+ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
+ *.shar.gz*) \
+ GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\
+ *.zip*) \
+ unzip $(distdir).zip ;;\
+ esac
+ chmod -R a-w $(distdir); chmod a+w $(distdir)
+ mkdir $(distdir)/_build
+ mkdir $(distdir)/_inst
+ chmod a-w $(distdir)
+ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
+ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+ && cd $(distdir)/_build \
+ && ../configure --srcdir=.. --prefix="$$dc_install_base" \
+ $(DISTCHECK_CONFIGURE_FLAGS) \
+ && $(MAKE) $(AM_MAKEFLAGS) \
+ && $(MAKE) $(AM_MAKEFLAGS) dvi \
+ && $(MAKE) $(AM_MAKEFLAGS) check \
+ && $(MAKE) $(AM_MAKEFLAGS) install \
+ && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+ && $(MAKE) $(AM_MAKEFLAGS) uninstall \
+ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
+ distuninstallcheck \
+ && chmod -R a-w "$$dc_install_base" \
+ && ({ \
+ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \
+ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
+ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
+ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
+ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
+ } || { rm -rf "$$dc_destdir"; exit 1; }) \
+ && rm -rf "$$dc_destdir" \
+ && $(MAKE) $(AM_MAKEFLAGS) dist \
+ && rm -rf $(DIST_ARCHIVES) \
+ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck
+ $(am__remove_distdir)
+ @(echo "$(distdir) archives ready for distribution: "; \
+ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
+ sed -e '1{h;s/./=/g;p;x;}' -e '$${p;x;}'
+distuninstallcheck:
+ @cd $(distuninstallcheck_dir) \
+ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \
+ || { echo "ERROR: files left after uninstall:" ; \
+ if test -n "$(DESTDIR)"; then \
+ echo " (check DESTDIR support)"; \
+ fi ; \
+ $(distuninstallcheck_listfiles) ; \
+ exit 1; } >&2
+distcleancheck: distclean
+ @if test '$(srcdir)' = . ; then \
+ echo "ERROR: distcleancheck can only run from a VPATH build" ; \
+ exit 1 ; \
+ fi
+ @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
+ || { echo "ERROR: files left in build directory after distclean:" ; \
+ $(distcleancheck_listfiles) ; \
+ exit 1; } >&2
+check-am: all-am
+check: check-am
+all-am: Makefile $(PROGRAMS) config.h
+installdirs:
+ for dir in "$(DESTDIR)$(bindir)"; do \
+ test -z "$$dir" || $(mkdir_p) "$$dir"; \
+ done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ `test -z '$(STRIP)' || \
+ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+ -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+ @echo "This command is intended for maintainers to use"
+ @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-binPROGRAMS clean-generic mostlyclean-am
+
+distclean: distclean-am
+ -rm -f $(am__CONFIG_DISTCLEAN_FILES)
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+ distclean-hdr distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-exec-am: install-binPROGRAMS
+
+install-info: install-info-am
+
+install-man:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+ -rm -f $(am__CONFIG_DISTCLEAN_FILES)
+ -rm -rf $(top_srcdir)/autom4te.cache
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-binPROGRAMS uninstall-info-am
+
+.PHONY: CTAGS GTAGS all all-am am--refresh check check-am clean \
+ clean-binPROGRAMS clean-generic ctags dist dist-all dist-bzip2 \
+ dist-gzip dist-shar dist-tarZ dist-zip distcheck distclean \
+ distclean-compile distclean-generic distclean-hdr \
+ distclean-tags distcleancheck distdir distuninstallcheck dvi \
+ dvi-am html html-am info info-am install install-am \
+ install-binPROGRAMS install-data install-data-am install-exec \
+ install-exec-am install-info install-info-am install-man \
+ install-strip installcheck installcheck-am installdirs \
+ maintainer-clean maintainer-clean-generic mostlyclean \
+ mostlyclean-compile mostlyclean-generic pdf pdf-am ps ps-am \
+ tags uninstall uninstall-am uninstall-binPROGRAMS \
+ uninstall-info-am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/scripts/training/memscore/aclocal.m4 b/contrib/memscore/aclocal.m4
index fcc48b927..fcc48b927 100644
--- a/scripts/training/memscore/aclocal.m4
+++ b/contrib/memscore/aclocal.m4
diff --git a/scripts/training/memscore/config.h.in b/contrib/memscore/config.h.in
index 05b54369c..05b54369c 100644
--- a/scripts/training/memscore/config.h.in
+++ b/contrib/memscore/config.h.in
diff --git a/scripts/training/memscore/configure b/contrib/memscore/configure
index 3849f2c29..3849f2c29 100755
--- a/scripts/training/memscore/configure
+++ b/contrib/memscore/configure
diff --git a/scripts/training/memscore/configure.ac b/contrib/memscore/configure.ac
index 23af85df2..23af85df2 100644
--- a/scripts/training/memscore/configure.ac
+++ b/contrib/memscore/configure.ac
diff --git a/scripts/training/memscore/datastorage.h b/contrib/memscore/datastorage.h
index 0eb4cded1..0eb4cded1 100644
--- a/scripts/training/memscore/datastorage.h
+++ b/contrib/memscore/datastorage.h
diff --git a/scripts/training/memscore/depcomp b/contrib/memscore/depcomp
index 04701da53..04701da53 100755
--- a/scripts/training/memscore/depcomp
+++ b/contrib/memscore/depcomp
diff --git a/scripts/training/memscore/install-sh b/contrib/memscore/install-sh
index 4d4a9519e..4d4a9519e 100755
--- a/scripts/training/memscore/install-sh
+++ b/contrib/memscore/install-sh
diff --git a/scripts/training/memscore/lexdecom.cpp b/contrib/memscore/lexdecom.cpp
index b0a47f440..b0a47f440 100644
--- a/scripts/training/memscore/lexdecom.cpp
+++ b/contrib/memscore/lexdecom.cpp
diff --git a/scripts/training/memscore/lexdecom.h b/contrib/memscore/lexdecom.h
index 1d87caf66..1d87caf66 100644
--- a/scripts/training/memscore/lexdecom.h
+++ b/contrib/memscore/lexdecom.h
diff --git a/scripts/training/memscore/m4/ax_boost_base.m4 b/contrib/memscore/m4/ax_boost_base.m4
index 2e5afd091..2e5afd091 100644
--- a/scripts/training/memscore/m4/ax_boost_base.m4
+++ b/contrib/memscore/m4/ax_boost_base.m4
diff --git a/scripts/training/memscore/memscore.cpp b/contrib/memscore/memscore.cpp
index c723b236e..c723b236e 100644
--- a/scripts/training/memscore/memscore.cpp
+++ b/contrib/memscore/memscore.cpp
diff --git a/scripts/training/memscore/memscore.h b/contrib/memscore/memscore.h
index 9b17691e1..9b17691e1 100644
--- a/scripts/training/memscore/memscore.h
+++ b/contrib/memscore/memscore.h
diff --git a/scripts/training/memscore/missing b/contrib/memscore/missing
index 894e786e1..894e786e1 100755
--- a/scripts/training/memscore/missing
+++ b/contrib/memscore/missing
diff --git a/scripts/training/memscore/phraselm.cpp b/contrib/memscore/phraselm.cpp
index 0f94f4326..0f94f4326 100644
--- a/scripts/training/memscore/phraselm.cpp
+++ b/contrib/memscore/phraselm.cpp
diff --git a/scripts/training/memscore/phraselm.h b/contrib/memscore/phraselm.h
index 62e8f08d4..62e8f08d4 100644
--- a/scripts/training/memscore/phraselm.h
+++ b/contrib/memscore/phraselm.h
diff --git a/scripts/training/memscore/phrasetable.cpp b/contrib/memscore/phrasetable.cpp
index 9f430c289..9f430c289 100644
--- a/scripts/training/memscore/phrasetable.cpp
+++ b/contrib/memscore/phrasetable.cpp
diff --git a/scripts/training/memscore/phrasetable.h b/contrib/memscore/phrasetable.h
index 14d68d702..14d68d702 100644
--- a/scripts/training/memscore/phrasetable.h
+++ b/contrib/memscore/phrasetable.h
diff --git a/scripts/training/memscore/scorer-impl.h b/contrib/memscore/scorer-impl.h
index bc544f12e..bc544f12e 100644
--- a/scripts/training/memscore/scorer-impl.h
+++ b/contrib/memscore/scorer-impl.h
diff --git a/scripts/training/memscore/scorer.cpp b/contrib/memscore/scorer.cpp
index e8cf4ce49..e8cf4ce49 100644
--- a/scripts/training/memscore/scorer.cpp
+++ b/contrib/memscore/scorer.cpp
diff --git a/scripts/training/memscore/scorer.h b/contrib/memscore/scorer.h
index 332f014c5..332f014c5 100644
--- a/scripts/training/memscore/scorer.h
+++ b/contrib/memscore/scorer.h
diff --git a/scripts/training/memscore/statistic.h b/contrib/memscore/statistic.h
index 2061a6608..2061a6608 100644
--- a/scripts/training/memscore/statistic.h
+++ b/contrib/memscore/statistic.h
diff --git a/scripts/training/memscore/timestamp.h b/contrib/memscore/timestamp.h
index fadb9cc8b..fadb9cc8b 100644
--- a/scripts/training/memscore/timestamp.h
+++ b/contrib/memscore/timestamp.h
diff --git a/scripts/moses-for-mere-mortals/READ_ME_FIRST.txt b/contrib/moses-for-mere-mortals/READ_ME_FIRST.txt
index fd5c71b18..fd5c71b18 100644
--- a/scripts/moses-for-mere-mortals/READ_ME_FIRST.txt
+++ b/contrib/moses-for-mere-mortals/READ_ME_FIRST.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.py b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.py
index 67fbec0f7..67fbec0f7 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.py
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.py
diff --git a/scripts/other/Extract_TMX_Corpus/Extract_TMX_Corpus.rsrc.py b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.rsrc.py
index 93e19edf2..93e19edf2 100644
--- a/scripts/other/Extract_TMX_Corpus/Extract_TMX_Corpus.rsrc.py
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/Extract_TMX_Corpus.rsrc.py
diff --git a/scripts/other/Extract_TMX_Corpus/LanguageCodes.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguageCodes.txt
index 22ca66c73..22ca66c73 100644
--- a/scripts/other/Extract_TMX_Corpus/LanguageCodes.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguageCodes.txt
diff --git a/scripts/other/Extract_TMX_Corpus/LanguagePairs.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguagePairs.txt
index d2ffd094e..d2ffd094e 100644
--- a/scripts/other/Extract_TMX_Corpus/LanguagePairs.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/LanguagePairs.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/_READ_ME_FIRST.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/_READ_ME_FIRST.txt
index c84dd4ac4..c84dd4ac4 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/_READ_ME_FIRST.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/_READ_ME_FIRST.txt
diff --git a/scripts/other/Extract_TMX_Corpus/gpl.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/gpl.txt
index 818433ecc..818433ecc 100644
--- a/scripts/other/Extract_TMX_Corpus/gpl.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Extract_TMX_Corpus-1.043/gpl.txt
diff --git a/scripts/other/Moses2TMX/LanguageCodes.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/LanguageCodes.txt
index 22ca66c73..22ca66c73 100644
--- a/scripts/other/Moses2TMX/LanguageCodes.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/LanguageCodes.txt
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.py b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.py
index 43ec3c78c..43ec3c78c 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.py
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.py
diff --git a/scripts/other/Moses2TMX/Moses2TMX.rsrc.py b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.rsrc.py
index dc1570c7f..dc1570c7f 100644
--- a/scripts/other/Moses2TMX/Moses2TMX.rsrc.py
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/Moses2TMX.rsrc.py
diff --git a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/_READ_ME_FIRST.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/_READ_ME_FIRST.txt
index d661dd136..d661dd136 100644
--- a/scripts/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/_READ_ME_FIRST.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/_READ_ME_FIRST.txt
diff --git a/scripts/other/Moses2TMX/gpl.txt b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/gpl.txt
index 818433ecc..818433ecc 100644
--- a/scripts/other/Moses2TMX/gpl.txt
+++ b/contrib/moses-for-mere-mortals/Windows-add-ins/Moses2TMX-1.032/gpl.txt
diff --git a/scripts/moses-for-mere-mortals/all.css b/contrib/moses-for-mere-mortals/all.css
index e83379dd4..e83379dd4 100644
--- a/scripts/moses-for-mere-mortals/all.css
+++ b/contrib/moses-for-mere-mortals/all.css
diff --git a/scripts/moses-for-mere-mortals/docs/Help-Tutorial.doc b/contrib/moses-for-mere-mortals/docs/Help-Tutorial.doc
index 2d03240c4..2d03240c4 100644
--- a/scripts/moses-for-mere-mortals/docs/Help-Tutorial.doc
+++ b/contrib/moses-for-mere-mortals/docs/Help-Tutorial.doc
Binary files differ
diff --git a/scripts/moses-for-mere-mortals/docs/Overview.jpeg b/contrib/moses-for-mere-mortals/docs/Overview.jpeg
index 9b4b1b0f2..9b4b1b0f2 100644
--- a/scripts/moses-for-mere-mortals/docs/Overview.jpeg
+++ b/contrib/moses-for-mere-mortals/docs/Overview.jpeg
Binary files differ
diff --git a/scripts/moses-for-mere-mortals/docs/Quick-Start-Guide.doc b/contrib/moses-for-mere-mortals/docs/Quick-Start-Guide.doc
index eac30c951..eac30c951 100644
--- a/scripts/moses-for-mere-mortals/docs/Quick-Start-Guide.doc
+++ b/contrib/moses-for-mere-mortals/docs/Quick-Start-Guide.doc
Binary files differ
diff --git a/scripts/moses-for-mere-mortals/docs/all.css b/contrib/moses-for-mere-mortals/docs/all.css
index e83379dd4..e83379dd4 100644
--- a/scripts/moses-for-mere-mortals/docs/all.css
+++ b/contrib/moses-for-mere-mortals/docs/all.css
diff --git a/scripts/moses-for-mere-mortals/docs/thanks.html b/contrib/moses-for-mere-mortals/docs/thanks.html
index afddba9f8..afddba9f8 100644
--- a/scripts/moses-for-mere-mortals/docs/thanks.html
+++ b/contrib/moses-for-mere-mortals/docs/thanks.html
diff --git a/scripts/moses-for-mere-mortals/index.html b/contrib/moses-for-mere-mortals/index.html
index 537e4db99..537e4db99 100644
--- a/scripts/moses-for-mere-mortals/index.html
+++ b/contrib/moses-for-mere-mortals/index.html
diff --git a/scripts/moses-for-mere-mortals/scripts/create-1.37 b/contrib/moses-for-mere-mortals/scripts/create-1.37
index 932eb5e92..932eb5e92 100644
--- a/scripts/moses-for-mere-mortals/scripts/create-1.37
+++ b/contrib/moses-for-mere-mortals/scripts/create-1.37
diff --git a/scripts/moses-for-mere-mortals/scripts/make-test-files-0.14 b/contrib/moses-for-mere-mortals/scripts/make-test-files-0.14
index c0807496d..c0807496d 100644
--- a/scripts/moses-for-mere-mortals/scripts/make-test-files-0.14
+++ b/contrib/moses-for-mere-mortals/scripts/make-test-files-0.14
diff --git a/scripts/moses-for-mere-mortals/scripts/modified-scripts/READ_ME_FIRST b/contrib/moses-for-mere-mortals/scripts/modified-scripts/READ_ME_FIRST
index d63fbea8e..d63fbea8e 100644
--- a/scripts/moses-for-mere-mortals/scripts/modified-scripts/READ_ME_FIRST
+++ b/contrib/moses-for-mere-mortals/scripts/modified-scripts/READ_ME_FIRST
diff --git a/scripts/moses-for-mere-mortals/scripts/modified-scripts/mert-moses-new-modif.pl b/contrib/moses-for-mere-mortals/scripts/modified-scripts/mert-moses-new-modif.pl
index bab8532cc..bab8532cc 100644
--- a/scripts/moses-for-mere-mortals/scripts/modified-scripts/mert-moses-new-modif.pl
+++ b/contrib/moses-for-mere-mortals/scripts/modified-scripts/mert-moses-new-modif.pl
diff --git a/scripts/moses-for-mere-mortals/scripts/modified-scripts/nonbreaking_prefix.pt b/contrib/moses-for-mere-mortals/scripts/modified-scripts/nonbreaking_prefix.pt
index a50e7245d..a50e7245d 100644
--- a/scripts/moses-for-mere-mortals/scripts/modified-scripts/nonbreaking_prefix.pt
+++ b/contrib/moses-for-mere-mortals/scripts/modified-scripts/nonbreaking_prefix.pt
diff --git a/scripts/moses-for-mere-mortals/scripts/score-0.85 b/contrib/moses-for-mere-mortals/scripts/score-0.85
index ebe161feb..ebe161feb 100644
--- a/scripts/moses-for-mere-mortals/scripts/score-0.85
+++ b/contrib/moses-for-mere-mortals/scripts/score-0.85
diff --git a/scripts/moses-for-mere-mortals/scripts/train-1.11 b/contrib/moses-for-mere-mortals/scripts/train-1.11
index dc65cf5d6..dc65cf5d6 100644
--- a/scripts/moses-for-mere-mortals/scripts/train-1.11
+++ b/contrib/moses-for-mere-mortals/scripts/train-1.11
diff --git a/scripts/moses-for-mere-mortals/scripts/transfer-training-to-another-location-0.07 b/contrib/moses-for-mere-mortals/scripts/transfer-training-to-another-location-0.07
index ab593a678..ab593a678 100644
--- a/scripts/moses-for-mere-mortals/scripts/transfer-training-to-another-location-0.07
+++ b/contrib/moses-for-mere-mortals/scripts/transfer-training-to-another-location-0.07
diff --git a/scripts/moses-for-mere-mortals/scripts/translate-1.32 b/contrib/moses-for-mere-mortals/scripts/translate-1.32
index 4f5c14052..4f5c14052 100644
--- a/scripts/moses-for-mere-mortals/scripts/translate-1.32
+++ b/contrib/moses-for-mere-mortals/scripts/translate-1.32
diff --git a/CreateOnDisk/CreateOnDisk.vcxproj b/contrib/other-builds/CreateOnDisk.vcxproj
index 7d2e1ea0e..7d2e1ea0e 100644
--- a/CreateOnDisk/CreateOnDisk.vcxproj
+++ b/contrib/other-builds/CreateOnDisk.vcxproj
diff --git a/CreateOnDisk/CreateOnDisk.xcodeproj/project.pbxproj b/contrib/other-builds/CreateOnDisk.xcodeproj/project.pbxproj
index d86b94fdf..d86b94fdf 100644
--- a/CreateOnDisk/CreateOnDisk.xcodeproj/project.pbxproj
+++ b/contrib/other-builds/CreateOnDisk.xcodeproj/project.pbxproj
diff --git a/OnDiskPt/OnDiskPt.vcxproj b/contrib/other-builds/OnDiskPt.vcxproj
index 2b1ea574b..2b1ea574b 100644
--- a/OnDiskPt/OnDiskPt.vcxproj
+++ b/contrib/other-builds/OnDiskPt.vcxproj
diff --git a/OnDiskPt/OnDiskPt.xcodeproj/project.pbxproj b/contrib/other-builds/OnDiskPt.xcodeproj/project.pbxproj
index 09ebe65bd..de24f4c5e 100644
--- a/OnDiskPt/OnDiskPt.xcodeproj/project.pbxproj
+++ b/contrib/other-builds/OnDiskPt.xcodeproj/project.pbxproj
@@ -197,6 +197,7 @@
HEADER_SEARCH_PATHS = (
/opt/local/include,
../kenlm,
+ ../,
);
INSTALL_PATH = /usr/local/lib;
PRODUCT_NAME = OnDiskPt;
@@ -212,6 +213,7 @@
HEADER_SEARCH_PATHS = (
/opt/local/include,
../kenlm,
+ ../,
);
INSTALL_PATH = /usr/local/lib;
PRODUCT_NAME = OnDiskPt;
diff --git a/moses-chart-cmd/moses-chart-cmd.vcxproj b/contrib/other-builds/moses-chart-cmd.vcxproj
index 27b260f6f..27b260f6f 100644
--- a/moses-chart-cmd/moses-chart-cmd.vcxproj
+++ b/contrib/other-builds/moses-chart-cmd.vcxproj
diff --git a/moses-chart-cmd/moses-chart-cmd.xcodeproj/project.pbxproj b/contrib/other-builds/moses-chart-cmd.xcodeproj/project.pbxproj
index 450fd0d56..68b98f760 100644
--- a/moses-chart-cmd/moses-chart-cmd.xcodeproj/project.pbxproj
+++ b/contrib/other-builds/moses-chart-cmd.xcodeproj/project.pbxproj
@@ -13,24 +13,11 @@
1E9DA31811BDC84A00F4DBD1 /* mbr.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1E9DA31111BDC84A00F4DBD1 /* mbr.cpp */; };
1E9DA31911BDC84A00F4DBD1 /* TranslationAnalysis.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1E9DA31311BDC84A00F4DBD1 /* TranslationAnalysis.cpp */; };
1E9DA35011BDC97100F4DBD1 /* libOnDiskPt.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1E9DA34F11BDC96A00F4DBD1 /* libOnDiskPt.a */; };
- 1ED8B842124B98A60030CCF4 /* libkenlm.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1E60D2A212496B1900D15873 /* libkenlm.a */; };
+ 1EE8C40B1476ABEC002496F2 /* liblm.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1EE8C3EC1476AB9B002496F2 /* liblm.a */; };
+ 1EE8C40C1476ABEC002496F2 /* libutil.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1EE8C3F31476ABAF002496F2 /* libutil.a */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
- 1E60D2A112496B1900D15873 /* PBXContainerItemProxy */ = {
- isa = PBXContainerItemProxy;
- containerPortal = 1E60D29A12496B1900D15873 /* kenlm.xcodeproj */;
- proxyType = 2;
- remoteGlobalIDString = D2AAC046055464E500DB518D;
- remoteInfo = kenlm;
- };
- 1E60D2A812496B4F00D15873 /* PBXContainerItemProxy */ = {
- isa = PBXContainerItemProxy;
- containerPortal = 1E60D29A12496B1900D15873 /* kenlm.xcodeproj */;
- proxyType = 1;
- remoteGlobalIDString = D2AAC045055464E500DB518D;
- remoteInfo = kenlm;
- };
1E87F08C11BDCD1B0033951C /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 1E9DA33311BDC8BB00F4DBD1 /* moses.xcodeproj */;
@@ -59,6 +46,20 @@
remoteGlobalIDString = D2AAC045055464E500DB518D;
remoteInfo = OnDiskPt;
};
+ 1EE8C3EB1476AB9B002496F2 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 1EE8C3E71476AB9B002496F2 /* lm.xcodeproj */;
+ proxyType = 2;
+ remoteGlobalIDString = 1EE8C2E91476A48E002496F2;
+ remoteInfo = lm;
+ };
+ 1EE8C3F21476ABAF002496F2 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 1EE8C3EE1476ABAE002496F2 /* util.xcodeproj */;
+ proxyType = 2;
+ remoteGlobalIDString = 1EE8C2711476A262002496F2;
+ remoteInfo = util;
+ };
/* End PBXContainerItemProxy section */
/* Begin PBXCopyFilesBuildPhase section */
@@ -74,7 +75,6 @@
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
- 1E60D29A12496B1900D15873 /* kenlm.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = kenlm.xcodeproj; path = ../kenlm/kenlm.xcodeproj; sourceTree = SOURCE_ROOT; };
1E9DA30A11BDC84A00F4DBD1 /* IOWrapper.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = IOWrapper.cpp; path = src/IOWrapper.cpp; sourceTree = "<group>"; };
1E9DA30B11BDC84A00F4DBD1 /* IOWrapper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = IOWrapper.h; path = src/IOWrapper.h; sourceTree = "<group>"; };
1E9DA30C11BDC84A00F4DBD1 /* Main.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = Main.cpp; path = src/Main.cpp; sourceTree = "<group>"; };
@@ -85,6 +85,8 @@
1E9DA31411BDC84A00F4DBD1 /* TranslationAnalysis.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TranslationAnalysis.h; path = src/TranslationAnalysis.h; sourceTree = "<group>"; };
1E9DA33311BDC8BB00F4DBD1 /* moses.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = moses.xcodeproj; path = ../moses/moses.xcodeproj; sourceTree = SOURCE_ROOT; };
1E9DA34A11BDC96A00F4DBD1 /* OnDiskPt.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = OnDiskPt.xcodeproj; path = ../OnDiskPt/OnDiskPt.xcodeproj; sourceTree = SOURCE_ROOT; };
+ 1EE8C3E71476AB9B002496F2 /* lm.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = lm.xcodeproj; path = ../lm/lm.xcodeproj; sourceTree = "<group>"; };
+ 1EE8C3EE1476ABAE002496F2 /* util.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = util.xcodeproj; path = ../util/util.xcodeproj; sourceTree = "<group>"; };
8DD76F6C0486A84900D96B5E /* moses-chart-cmd */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = "moses-chart-cmd"; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
@@ -93,7 +95,8 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
- 1ED8B842124B98A60030CCF4 /* libkenlm.a in Frameworks */,
+ 1EE8C40B1476ABEC002496F2 /* liblm.a in Frameworks */,
+ 1EE8C40C1476ABEC002496F2 /* libutil.a in Frameworks */,
1E87F09311BDCD2E0033951C /* libmoses.a in Frameworks */,
1E9DA35011BDC97100F4DBD1 /* libOnDiskPt.a in Frameworks */,
);
@@ -105,7 +108,8 @@
08FB7794FE84155DC02AAC07 /* moses-chart-cmd */ = {
isa = PBXGroup;
children = (
- 1E60D29A12496B1900D15873 /* kenlm.xcodeproj */,
+ 1EE8C3EE1476ABAE002496F2 /* util.xcodeproj */,
+ 1EE8C3E71476AB9B002496F2 /* lm.xcodeproj */,
1E9DA34A11BDC96A00F4DBD1 /* OnDiskPt.xcodeproj */,
1E9DA33311BDC8BB00F4DBD1 /* moses.xcodeproj */,
08FB7795FE84155DC02AAC07 /* Source */,
@@ -138,26 +142,34 @@
name = Products;
sourceTree = "<group>";
};
- 1E60D29B12496B1900D15873 /* Products */ = {
+ 1E9DA33411BDC8BB00F4DBD1 /* Products */ = {
isa = PBXGroup;
children = (
- 1E60D2A212496B1900D15873 /* libkenlm.a */,
+ 1E87F08D11BDCD1B0033951C /* libmoses.a */,
);
name = Products;
sourceTree = "<group>";
};
- 1E9DA33411BDC8BB00F4DBD1 /* Products */ = {
+ 1E9DA34B11BDC96A00F4DBD1 /* Products */ = {
isa = PBXGroup;
children = (
- 1E87F08D11BDCD1B0033951C /* libmoses.a */,
+ 1E9DA34F11BDC96A00F4DBD1 /* libOnDiskPt.a */,
);
name = Products;
sourceTree = "<group>";
};
- 1E9DA34B11BDC96A00F4DBD1 /* Products */ = {
+ 1EE8C3E81476AB9B002496F2 /* Products */ = {
isa = PBXGroup;
children = (
- 1E9DA34F11BDC96A00F4DBD1 /* libOnDiskPt.a */,
+ 1EE8C3EC1476AB9B002496F2 /* liblm.a */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 1EE8C3EF1476ABAE002496F2 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 1EE8C3F31476ABAF002496F2 /* libutil.a */,
);
name = Products;
sourceTree = "<group>";
@@ -185,7 +197,6 @@
dependencies = (
1E9DA36411BDC9B200F4DBD1 /* PBXTargetDependency */,
1E87F09511BDCD390033951C /* PBXTargetDependency */,
- 1E60D2A912496B4F00D15873 /* PBXTargetDependency */,
);
name = "moses-chart-cmd";
productInstallPath = "$(HOME)/bin";
@@ -212,8 +223,8 @@
projectDirPath = "";
projectReferences = (
{
- ProductGroup = 1E60D29B12496B1900D15873 /* Products */;
- ProjectRef = 1E60D29A12496B1900D15873 /* kenlm.xcodeproj */;
+ ProductGroup = 1EE8C3E81476AB9B002496F2 /* Products */;
+ ProjectRef = 1EE8C3E71476AB9B002496F2 /* lm.xcodeproj */;
},
{
ProductGroup = 1E9DA33411BDC8BB00F4DBD1 /* Products */;
@@ -223,6 +234,10 @@
ProductGroup = 1E9DA34B11BDC96A00F4DBD1 /* Products */;
ProjectRef = 1E9DA34A11BDC96A00F4DBD1 /* OnDiskPt.xcodeproj */;
},
+ {
+ ProductGroup = 1EE8C3EF1476ABAE002496F2 /* Products */;
+ ProjectRef = 1EE8C3EE1476ABAE002496F2 /* util.xcodeproj */;
+ },
);
projectRoot = "";
targets = (
@@ -232,13 +247,6 @@
/* End PBXProject section */
/* Begin PBXReferenceProxy section */
- 1E60D2A212496B1900D15873 /* libkenlm.a */ = {
- isa = PBXReferenceProxy;
- fileType = archive.ar;
- path = libkenlm.a;
- remoteRef = 1E60D2A112496B1900D15873 /* PBXContainerItemProxy */;
- sourceTree = BUILT_PRODUCTS_DIR;
- };
1E87F08D11BDCD1B0033951C /* libmoses.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
@@ -253,6 +261,20 @@
remoteRef = 1E9DA34E11BDC96A00F4DBD1 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
+ 1EE8C3EC1476AB9B002496F2 /* liblm.a */ = {
+ isa = PBXReferenceProxy;
+ fileType = archive.ar;
+ path = liblm.a;
+ remoteRef = 1EE8C3EB1476AB9B002496F2 /* PBXContainerItemProxy */;
+ sourceTree = BUILT_PRODUCTS_DIR;
+ };
+ 1EE8C3F31476ABAF002496F2 /* libutil.a */ = {
+ isa = PBXReferenceProxy;
+ fileType = archive.ar;
+ path = libutil.a;
+ remoteRef = 1EE8C3F21476ABAF002496F2 /* PBXContainerItemProxy */;
+ sourceTree = BUILT_PRODUCTS_DIR;
+ };
/* End PBXReferenceProxy section */
/* Begin PBXSourcesBuildPhase section */
@@ -270,11 +292,6 @@
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
- 1E60D2A912496B4F00D15873 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- name = kenlm;
- targetProxy = 1E60D2A812496B4F00D15873 /* PBXContainerItemProxy */;
- };
1E87F09511BDCD390033951C /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = moses;
@@ -303,6 +320,7 @@
"_FILE_OFFSET_BITS=64",
);
HEADER_SEARCH_PATHS = (
+ ..,
../moses/src,
../kenlm,
/opt/local/include,
@@ -338,6 +356,7 @@
"_FILE_OFFSET_BITS=64",
);
HEADER_SEARCH_PATHS = (
+ ..,
../moses/src,
../kenlm,
/opt/local/include,
diff --git a/moses-cmd/moses-cmd.vcxproj b/contrib/other-builds/moses-cmd.vcxproj
index 0d3126b98..0d3126b98 100644
--- a/moses-cmd/moses-cmd.vcxproj
+++ b/contrib/other-builds/moses-cmd.vcxproj
diff --git a/moses-cmd/moses-cmd.xcodeproj/project.pbxproj b/contrib/other-builds/moses-cmd.xcodeproj/project.pbxproj
index 8fb7c5e04..e3e80b479 100644
--- a/moses-cmd/moses-cmd.xcodeproj/project.pbxproj
+++ b/contrib/other-builds/moses-cmd.xcodeproj/project.pbxproj
@@ -14,7 +14,8 @@
1C8CFF4F0AD68D3600FA22E2 /* TranslationAnalysis.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1C8CFF470AD68D3600FA22E2 /* TranslationAnalysis.cpp */; };
1C8CFF500AD68D3600FA22E2 /* TranslationAnalysis.h in CopyFiles */ = {isa = PBXBuildFile; fileRef = 1C8CFF480AD68D3600FA22E2 /* TranslationAnalysis.h */; };
1CE646E411679F6900EC77CC /* libOnDiskPt.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1CE646E311679F5F00EC77CC /* libOnDiskPt.a */; };
- 1EBB175F126C16B800AE6102 /* libkenlm.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1EBB175A126C169000AE6102 /* libkenlm.a */; };
+ 1EE8C2DD1476A3F2002496F2 /* libutil.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1EE8C2DC1476A34A002496F2 /* libutil.a */; };
+ 1EE8C3C91476AB64002496F2 /* liblm.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1EE8C3C81476AB3C002496F2 /* liblm.a */; };
B219B8540E93812700EAB407 /* libmoses.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 03306D670C0B240B00CA1311 /* libmoses.a */; };
B219B8580E9381AC00EAB407 /* IOWrapper.cpp in Sources */ = {isa = PBXBuildFile; fileRef = B219B8560E9381AC00EAB407 /* IOWrapper.cpp */; };
B28B1ED3110F52BB00AAD188 /* LatticeMBR.cpp in Sources */ = {isa = PBXBuildFile; fileRef = B28B1ED2110F52BB00AAD188 /* LatticeMBR.cpp */; };
@@ -49,19 +50,19 @@
remoteGlobalIDString = D2AAC045055464E500DB518D;
remoteInfo = OnDiskPt;
};
- 1EBB1759126C169000AE6102 /* PBXContainerItemProxy */ = {
+ 1EE8C2DB1476A34A002496F2 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
- containerPortal = 1EBB1752126C169000AE6102 /* kenlm.xcodeproj */;
+ containerPortal = 1EE8C2D41476A34A002496F2 /* util.xcodeproj */;
proxyType = 2;
- remoteGlobalIDString = D2AAC046055464E500DB518D;
- remoteInfo = kenlm;
+ remoteGlobalIDString = 1EE8C2711476A262002496F2;
+ remoteInfo = util;
};
- 1ED0E9661277CFC500AC18B1 /* PBXContainerItemProxy */ = {
+ 1EE8C3C71476AB3C002496F2 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
- containerPortal = 1EBB1752126C169000AE6102 /* kenlm.xcodeproj */;
- proxyType = 1;
- remoteGlobalIDString = D2AAC045055464E500DB518D;
- remoteInfo = kenlm;
+ containerPortal = 1EE8C3C01476AB3C002496F2 /* lm.xcodeproj */;
+ proxyType = 2;
+ remoteGlobalIDString = 1EE8C2E91476A48E002496F2;
+ remoteInfo = lm;
};
/* End PBXContainerItemProxy section */
@@ -89,7 +90,8 @@
1C8CFF470AD68D3600FA22E2 /* TranslationAnalysis.cpp */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.cpp; name = TranslationAnalysis.cpp; path = src/TranslationAnalysis.cpp; sourceTree = "<group>"; };
1C8CFF480AD68D3600FA22E2 /* TranslationAnalysis.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; name = TranslationAnalysis.h; path = src/TranslationAnalysis.h; sourceTree = "<group>"; };
1CE646DB11679F5F00EC77CC /* OnDiskPt.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = OnDiskPt.xcodeproj; path = ../OnDiskPt/OnDiskPt.xcodeproj; sourceTree = SOURCE_ROOT; };
- 1EBB1752126C169000AE6102 /* kenlm.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = kenlm.xcodeproj; path = ../kenlm/kenlm.xcodeproj; sourceTree = SOURCE_ROOT; };
+ 1EE8C2D41476A34A002496F2 /* util.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = util.xcodeproj; path = ../util/util.xcodeproj; sourceTree = "<group>"; };
+ 1EE8C3C01476AB3C002496F2 /* lm.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = lm.xcodeproj; path = ../lm/lm.xcodeproj; sourceTree = "<group>"; };
8DD76F6C0486A84900D96B5E /* moses-cmd */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = "moses-cmd"; sourceTree = BUILT_PRODUCTS_DIR; };
B219B8560E9381AC00EAB407 /* IOWrapper.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = IOWrapper.cpp; path = src/IOWrapper.cpp; sourceTree = "<group>"; };
B219B8570E9381AC00EAB407 /* IOWrapper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = IOWrapper.h; path = src/IOWrapper.h; sourceTree = "<group>"; };
@@ -102,7 +104,8 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
- 1EBB175F126C16B800AE6102 /* libkenlm.a in Frameworks */,
+ 1EE8C3C91476AB64002496F2 /* liblm.a in Frameworks */,
+ 1EE8C2DD1476A3F2002496F2 /* libutil.a in Frameworks */,
1CE646E411679F6900EC77CC /* libOnDiskPt.a in Frameworks */,
B219B8540E93812700EAB407 /* libmoses.a in Frameworks */,
);
@@ -122,7 +125,8 @@
08FB7794FE84155DC02AAC07 /* moses-cmd */ = {
isa = PBXGroup;
children = (
- 1EBB1752126C169000AE6102 /* kenlm.xcodeproj */,
+ 1EE8C3C01476AB3C002496F2 /* lm.xcodeproj */,
+ 1EE8C2D41476A34A002496F2 /* util.xcodeproj */,
1CE646DB11679F5F00EC77CC /* OnDiskPt.xcodeproj */,
03306D5F0C0B240B00CA1311 /* moses.xcodeproj */,
08FB7795FE84155DC02AAC07 /* Source */,
@@ -165,10 +169,18 @@
name = Products;
sourceTree = "<group>";
};
- 1EBB1753126C169000AE6102 /* Products */ = {
+ 1EE8C2D51476A34A002496F2 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 1EE8C2DC1476A34A002496F2 /* libutil.a */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 1EE8C3C11476AB3C002496F2 /* Products */ = {
isa = PBXGroup;
children = (
- 1EBB175A126C169000AE6102 /* libkenlm.a */,
+ 1EE8C3C81476AB3C002496F2 /* liblm.a */,
);
name = Products;
sourceTree = "<group>";
@@ -196,7 +208,6 @@
dependencies = (
03306D780C0B244800CA1311 /* PBXTargetDependency */,
1CE6472E1167A11600EC77CC /* PBXTargetDependency */,
- 1ED0E9671277CFC500AC18B1 /* PBXTargetDependency */,
);
name = "moses-cmd";
productInstallPath = "$(HOME)/bin";
@@ -223,8 +234,8 @@
projectDirPath = "";
projectReferences = (
{
- ProductGroup = 1EBB1753126C169000AE6102 /* Products */;
- ProjectRef = 1EBB1752126C169000AE6102 /* kenlm.xcodeproj */;
+ ProductGroup = 1EE8C3C11476AB3C002496F2 /* Products */;
+ ProjectRef = 1EE8C3C01476AB3C002496F2 /* lm.xcodeproj */;
},
{
ProductGroup = 03306D600C0B240B00CA1311 /* Products */;
@@ -234,6 +245,10 @@
ProductGroup = 1CE646DC11679F5F00EC77CC /* Products */;
ProjectRef = 1CE646DB11679F5F00EC77CC /* OnDiskPt.xcodeproj */;
},
+ {
+ ProductGroup = 1EE8C2D51476A34A002496F2 /* Products */;
+ ProjectRef = 1EE8C2D41476A34A002496F2 /* util.xcodeproj */;
+ },
);
projectRoot = "";
targets = (
@@ -257,11 +272,18 @@
remoteRef = 1CE646E211679F5F00EC77CC /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
- 1EBB175A126C169000AE6102 /* libkenlm.a */ = {
+ 1EE8C2DC1476A34A002496F2 /* libutil.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
- path = libkenlm.a;
- remoteRef = 1EBB1759126C169000AE6102 /* PBXContainerItemProxy */;
+ path = libutil.a;
+ remoteRef = 1EE8C2DB1476A34A002496F2 /* PBXContainerItemProxy */;
+ sourceTree = BUILT_PRODUCTS_DIR;
+ };
+ 1EE8C3C81476AB3C002496F2 /* liblm.a */ = {
+ isa = PBXReferenceProxy;
+ fileType = archive.ar;
+ path = liblm.a;
+ remoteRef = 1EE8C3C71476AB3C002496F2 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
/* End PBXReferenceProxy section */
@@ -292,11 +314,6 @@
name = OnDiskPt;
targetProxy = 1CE6472D1167A11600EC77CC /* PBXContainerItemProxy */;
};
- 1ED0E9671277CFC500AC18B1 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- name = kenlm;
- targetProxy = 1ED0E9661277CFC500AC18B1 /* PBXContainerItemProxy */;
- };
/* End PBXTargetDependency section */
/* Begin XCBuildConfiguration section */
@@ -317,6 +334,7 @@
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
+ ../,
../moses/src,
../kenlm,
/opt/local/include,
@@ -357,6 +375,7 @@
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
+ ../,
../moses/src,
../kenlm,
/opt/local/include,
@@ -389,6 +408,7 @@
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
+ ../,
../moses/src,
../kenlm,
/opt/local/include,
diff --git a/moses.sln b/contrib/other-builds/moses.sln
index 1e2d4eb45..1e2d4eb45 100644
--- a/moses.sln
+++ b/contrib/other-builds/moses.sln
diff --git a/moses/moses.vcxproj b/contrib/other-builds/moses.vcxproj
index 1366995f3..1366995f3 100644
--- a/moses/moses.vcxproj
+++ b/contrib/other-builds/moses.vcxproj
diff --git a/moses/moses.xcodeproj/project.pbxproj b/contrib/other-builds/moses.xcodeproj/project.pbxproj
index db743b782..018d67f7e 100644
--- a/moses/moses.xcodeproj/project.pbxproj
+++ b/contrib/other-builds/moses.xcodeproj/project.pbxproj
@@ -67,7 +67,6 @@
1E46B5A713BA5C7F0084F898 /* RuleCubeItem.h in Headers */ = {isa = PBXBuildFile; fileRef = 1E46B5A513BA5C7F0084F898 /* RuleCubeItem.h */; };
1E474E12145575CA00178AD5 /* RuleTableLoader.h in Headers */ = {isa = PBXBuildFile; fileRef = 1E474E11145575CA00178AD5 /* RuleTableLoader.h */; };
1E528B9D13A12B2D00E9A67E /* params.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1E528B9B13A12B2D00E9A67E /* params.cpp */; };
- 1E528B9E13A12B2D00E9A67E /* SyntacticLanguageModel.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1E528B9C13A12B2D00E9A67E /* SyntacticLanguageModel.cpp */; };
1EA6AB4A13BCC838004465AF /* ChartRuleLookupManager.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1EA6AB4813BCC838004465AF /* ChartRuleLookupManager.cpp */; };
1EA6AB4B13BCC838004465AF /* ChartRuleLookupManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 1EA6AB4913BCC838004465AF /* ChartRuleLookupManager.h */; };
1EBB262913A12DB500B51840 /* hash.h in Headers */ = {isa = PBXBuildFile; fileRef = 1EBB262213A12DB500B51840 /* hash.h */; };
@@ -337,7 +336,6 @@
1E46B5A513BA5C7F0084F898 /* RuleCubeItem.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RuleCubeItem.h; path = src/RuleCubeItem.h; sourceTree = "<group>"; };
1E474E11145575CA00178AD5 /* RuleTableLoader.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RuleTableLoader.h; path = src/RuleTableLoader.h; sourceTree = "<group>"; };
1E528B9B13A12B2D00E9A67E /* params.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = params.cpp; path = src/DynSAInclude/params.cpp; sourceTree = "<group>"; };
- 1E528B9C13A12B2D00E9A67E /* SyntacticLanguageModel.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = SyntacticLanguageModel.cpp; path = src/SyntacticLanguageModel.cpp; sourceTree = "<group>"; };
1EA6AB4813BCC838004465AF /* ChartRuleLookupManager.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = ChartRuleLookupManager.cpp; path = src/ChartRuleLookupManager.cpp; sourceTree = "<group>"; };
1EA6AB4913BCC838004465AF /* ChartRuleLookupManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ChartRuleLookupManager.h; path = src/ChartRuleLookupManager.h; sourceTree = "<group>"; };
1EBB262213A12DB500B51840 /* hash.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = hash.h; path = src/DynSAInclude/hash.h; sourceTree = "<group>"; };
@@ -762,7 +760,6 @@
1ED0FDFC124BB9380029177F /* SquareMatrix.h */,
1ED0FDFD124BB9380029177F /* StaticData.cpp */,
1ED0FDFE124BB9380029177F /* StaticData.h */,
- 1E528B9C13A12B2D00E9A67E /* SyntacticLanguageModel.cpp */,
1ED0FDFF124BB9380029177F /* TargetPhrase.cpp */,
1ED0FE00124BB9380029177F /* TargetPhrase.h */,
1ED0FE01124BB9380029177F /* TargetPhraseCollection.cpp */,
@@ -1184,7 +1181,6 @@
1E2E163E132A892800ED4085 /* ThreadPool.cpp in Sources */,
1EE58D9F133726C700D93158 /* NonTerminal.cpp in Sources */,
1E528B9D13A12B2D00E9A67E /* params.cpp in Sources */,
- 1E528B9E13A12B2D00E9A67E /* SyntacticLanguageModel.cpp in Sources */,
1E07291F13B3854D004454FD /* AlignmentInfoCollection.cpp in Sources */,
1E46B5A613BA5C7F0084F898 /* RuleCubeItem.cpp in Sources */,
1EA6AB4A13BCC838004465AF /* ChartRuleLookupManager.cpp in Sources */,
@@ -1232,6 +1228,7 @@
_LARGE_FILES,
);
HEADER_SEARCH_PATHS = (
+ ../,
src,
../irstlm/include,
../srilm/include,
@@ -1264,6 +1261,7 @@
_LARGE_FILES,
);
HEADER_SEARCH_PATHS = (
+ ../,
src,
../irstlm/include,
../srilm/include,
diff --git a/reranking/data/README b/contrib/reranking/data/README
index 59b20b32d..59b20b32d 100644
--- a/reranking/data/README
+++ b/contrib/reranking/data/README
diff --git a/reranking/data/nbest.small b/contrib/reranking/data/nbest.small
index 0fcbc44ce..0fcbc44ce 100644
--- a/reranking/data/nbest.small
+++ b/contrib/reranking/data/nbest.small
diff --git a/reranking/data/weights b/contrib/reranking/data/weights
index c6b6c1ac0..c6b6c1ac0 100644
--- a/reranking/data/weights
+++ b/contrib/reranking/data/weights
diff --git a/reranking/src/Hypo.cpp b/contrib/reranking/src/Hypo.cpp
index 0ceb21abd..0ceb21abd 100644
--- a/reranking/src/Hypo.cpp
+++ b/contrib/reranking/src/Hypo.cpp
diff --git a/reranking/src/Hypo.h b/contrib/reranking/src/Hypo.h
index a85410289..a85410289 100644
--- a/reranking/src/Hypo.h
+++ b/contrib/reranking/src/Hypo.h
diff --git a/reranking/src/Main.cpp b/contrib/reranking/src/Main.cpp
index a91be8e73..a91be8e73 100644
--- a/reranking/src/Main.cpp
+++ b/contrib/reranking/src/Main.cpp
diff --git a/reranking/src/Makefile b/contrib/reranking/src/Makefile
index 7b0ec6945..7b0ec6945 100644
--- a/reranking/src/Makefile
+++ b/contrib/reranking/src/Makefile
diff --git a/reranking/src/NBest.cpp b/contrib/reranking/src/NBest.cpp
index 24a0f60c3..24a0f60c3 100644
--- a/reranking/src/NBest.cpp
+++ b/contrib/reranking/src/NBest.cpp
diff --git a/reranking/src/NBest.h b/contrib/reranking/src/NBest.h
index 9a4aa9447..9a4aa9447 100644
--- a/reranking/src/NBest.h
+++ b/contrib/reranking/src/NBest.h
diff --git a/reranking/src/ParameterNBest.cpp b/contrib/reranking/src/ParameterNBest.cpp
index 005f3890c..005f3890c 100644
--- a/reranking/src/ParameterNBest.cpp
+++ b/contrib/reranking/src/ParameterNBest.cpp
diff --git a/reranking/src/ParameterNBest.h b/contrib/reranking/src/ParameterNBest.h
index bc554d4b9..bc554d4b9 100644
--- a/reranking/src/ParameterNBest.h
+++ b/contrib/reranking/src/ParameterNBest.h
diff --git a/reranking/src/Tools.cpp b/contrib/reranking/src/Tools.cpp
index 8312c3370..8312c3370 100644
--- a/reranking/src/Tools.cpp
+++ b/contrib/reranking/src/Tools.cpp
diff --git a/reranking/src/Tools.h b/contrib/reranking/src/Tools.h
index eb71746b0..eb71746b0 100644
--- a/reranking/src/Tools.h
+++ b/contrib/reranking/src/Tools.h
diff --git a/contrib/server/Jamfile b/contrib/server/Jamfile
new file mode 100644
index 000000000..211ab05f9
--- /dev/null
+++ b/contrib/server/Jamfile
@@ -0,0 +1,41 @@
+# If you get compilation errors here, make sure you have xmlrpc-c installed properly, including the abyss server option.
+
+import option ;
+import path ;
+
+with-xmlrpc-c = [ option.get "with-xmlrpc-c" ] ;
+if $(with-xmlrpc-c) {
+ build-moses-server = true ;
+ xmlrpc-command = $(with-xmlrpc-c)/bin/xmlrpc-c-config ;
+ if ! [ path.exists $(xmlrpc-command) ] {
+ exit Could not find $(xmlrpc-command) : 1 ;
+ }
+} else {
+ xmlrpc-check = [ _shell "xmlrpc-c-config --features 2>/dev/null" : exit-status ] ;
+ if $(xmlrpc-check[2]) = 0 {
+ if [ MATCH "(abyss-server)" : $(xmlrpc-check[1]) ] {
+ build-moses-server = true ;
+ } else {
+ echo "Found xmlrpc-c but it does not have abyss-server. Skipping mosesserver." ;
+ }
+ }
+ xmlrpc-command = "xmlrpc-c-config" ;
+}
+
+rule shell_or_die ( cmd ) {
+ local ret = [ _shell $(cmd) : exit-status ] ;
+ if $(ret[2]) != 0 {
+ exit "Failed to run $(cmd)" : 1 ;
+ }
+ return $(ret[1]) ;
+}
+
+if $(build-moses-server) = true
+{
+ xmlrpc-linkflags = [ shell_or_die "$(xmlrpc-command) c++2 abyss-server --libs" ] ;
+ xmlrpc-cxxflags = [ shell_or_die "$(xmlrpc-command) c++2 abyss-server --cflags" ] ;
+
+ exe mosesserver : mosesserver.cpp ../../moses/src//moses ../../OnDiskPt//OnDiskPt : <linkflags>$(xmlrpc-linkflags) <cxxflags>$(xmlrpc-cxxflags) ;
+} else {
+ alias mosesserver ;
+}
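
The Jamfile above either takes an explicit xmlrpc-c prefix via --with-xmlrpc-c or probes xmlrpc-c-config on the PATH, and only builds mosesserver when the abyss-server feature is reported. A minimal usage sketch, based solely on the options handled in this Jamfile (the /opt/xmlrpc-c prefix is a placeholder, not something this commit defines):

    # Check that the local xmlrpc-c build provides the abyss server option.
    xmlrpc-c-config --features | grep abyss-server

    # Build with an explicit prefix; the Jamfile then uses
    # <prefix>/bin/xmlrpc-c-config to obtain link and compile flags.
    ./bjam --with-xmlrpc-c=/opt/xmlrpc-c

    # Or rely on whichever xmlrpc-c-config is on the PATH (the else branch above);
    # if abyss-server is missing, mosesserver is silently skipped.
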
diff --git a/server/client.perl b/contrib/server/client.perl
index eca5e388f..eca5e388f 100755
--- a/server/client.perl
+++ b/contrib/server/client.perl
diff --git a/server/mosesserver.cpp b/contrib/server/mosesserver.cpp
index 7577837fb..c3459a59e 100644
--- a/server/mosesserver.cpp
+++ b/contrib/server/mosesserver.cpp
@@ -1,4 +1,4 @@
-#include <cassert>
+#include "util/check.hh"
#include <stdexcept>
#include <iostream>
@@ -182,7 +182,7 @@ public:
const TranslationSystem& system = getTranslationSystem(params);
- Sentence sentence(Input);
+ Sentence sentence;
const vector<FactorType> &inputFactorOrder =
staticData.GetInputFactorOrder();
stringstream in(source + "\n");
@@ -217,7 +217,7 @@ public:
void outputHypo(ostream& out, const Hypothesis* hypo, bool addAlignmentInfo, vector<xmlrpc_c::value>& alignInfo, bool reportAllFactors = false) {
if (hypo->GetPrevHypo() != NULL) {
outputHypo(out,hypo->GetPrevHypo(),addAlignmentInfo, alignInfo, reportAllFactors);
- Phrase p = hypo->GetTargetPhrase();
+ Phrase p = hypo->GetCurrTargetPhrase();
if(reportAllFactors) {
out << p << " ";
} else {
@@ -376,6 +376,6 @@ int main(int argc, char** argv)
myAbyssServer.run();
}
// xmlrpc_c::serverAbyss.run() never returns
- assert(false);
+ CHECK(false);
return 0;
}
diff --git a/server/sgclient.perl b/contrib/server/sgclient.perl
index 9a23ad8f4..9a23ad8f4 100755
--- a/server/sgclient.perl
+++ b/contrib/server/sgclient.perl
diff --git a/sigtest-filter/Makefile b/contrib/sigtest-filter/Makefile
index ddefc907b..ddefc907b 100644
--- a/sigtest-filter/Makefile
+++ b/contrib/sigtest-filter/Makefile
diff --git a/sigtest-filter/README.txt b/contrib/sigtest-filter/README.txt
index b21129b89..b21129b89 100644
--- a/sigtest-filter/README.txt
+++ b/contrib/sigtest-filter/README.txt
diff --git a/sigtest-filter/WIN32_functions.cpp b/contrib/sigtest-filter/WIN32_functions.cpp
index 60ddd340c..60ddd340c 100644
--- a/sigtest-filter/WIN32_functions.cpp
+++ b/contrib/sigtest-filter/WIN32_functions.cpp
diff --git a/sigtest-filter/WIN32_functions.h b/contrib/sigtest-filter/WIN32_functions.h
index 6a719392e..6a719392e 100644
--- a/sigtest-filter/WIN32_functions.h
+++ b/contrib/sigtest-filter/WIN32_functions.h
diff --git a/sigtest-filter/check-install b/contrib/sigtest-filter/check-install
index ba4f431e0..ba4f431e0 100755
--- a/sigtest-filter/check-install
+++ b/contrib/sigtest-filter/check-install
diff --git a/sigtest-filter/filter-pt.cpp b/contrib/sigtest-filter/filter-pt.cpp
index b0828ae33..b0828ae33 100644
--- a/sigtest-filter/filter-pt.cpp
+++ b/contrib/sigtest-filter/filter-pt.cpp
diff --git a/sigtest-filter/sigtest-filter.sln b/contrib/sigtest-filter/sigtest-filter.sln
index 517b06238..517b06238 100644
--- a/sigtest-filter/sigtest-filter.sln
+++ b/contrib/sigtest-filter/sigtest-filter.sln
diff --git a/sigtest-filter/sigtest-filter.vcproj b/contrib/sigtest-filter/sigtest-filter.vcproj
index a961ac61d..a961ac61d 100644
--- a/sigtest-filter/sigtest-filter.vcproj
+++ b/contrib/sigtest-filter/sigtest-filter.vcproj
diff --git a/synlm/hhmm/LICENSE b/contrib/synlm/hhmm/LICENSE
index 25b5b7097..25b5b7097 100644
--- a/synlm/hhmm/LICENSE
+++ b/contrib/synlm/hhmm/LICENSE
diff --git a/synlm/hhmm/rvtl/include/nl-archetypeset.h b/contrib/synlm/hhmm/rvtl/include/nl-archetypeset.h
index 914e85e92..914e85e92 100644
--- a/synlm/hhmm/rvtl/include/nl-archetypeset.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-archetypeset.h
diff --git a/synlm/hhmm/rvtl/include/nl-array.h b/contrib/synlm/hhmm/rvtl/include/nl-array.h
index 0dfb74b44..0dfb74b44 100644
--- a/synlm/hhmm/rvtl/include/nl-array.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-array.h
diff --git a/synlm/hhmm/rvtl/include/nl-beam.h b/contrib/synlm/hhmm/rvtl/include/nl-beam.h
index 398babe21..398babe21 100644
--- a/synlm/hhmm/rvtl/include/nl-beam.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-beam.h
diff --git a/synlm/hhmm/rvtl/include/nl-const.h b/contrib/synlm/hhmm/rvtl/include/nl-const.h
index 3d6023f39..3d6023f39 100644
--- a/synlm/hhmm/rvtl/include/nl-const.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-const.h
diff --git a/synlm/hhmm/rvtl/include/nl-cpt.h b/contrib/synlm/hhmm/rvtl/include/nl-cpt.h
index 67a1a1021..67a1a1021 100644
--- a/synlm/hhmm/rvtl/include/nl-cpt.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-cpt.h
diff --git a/synlm/hhmm/rvtl/include/nl-crf.h b/contrib/synlm/hhmm/rvtl/include/nl-crf.h
index 44744ad03..44744ad03 100644
--- a/synlm/hhmm/rvtl/include/nl-crf.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-crf.h
diff --git a/synlm/hhmm/rvtl/include/nl-denot.h b/contrib/synlm/hhmm/rvtl/include/nl-denot.h
index 0b50663a1..0b50663a1 100644
--- a/synlm/hhmm/rvtl/include/nl-denot.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-denot.h
diff --git a/synlm/hhmm/rvtl/include/nl-dtree-cont.h b/contrib/synlm/hhmm/rvtl/include/nl-dtree-cont.h
index cf6b00d28..cf6b00d28 100644
--- a/synlm/hhmm/rvtl/include/nl-dtree-cont.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-dtree-cont.h
diff --git a/synlm/hhmm/rvtl/include/nl-dtree.h b/contrib/synlm/hhmm/rvtl/include/nl-dtree.h
index 2396f395c..2396f395c 100644
--- a/synlm/hhmm/rvtl/include/nl-dtree.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-dtree.h
diff --git a/synlm/hhmm/rvtl/include/nl-fixedmatrix.h b/contrib/synlm/hhmm/rvtl/include/nl-fixedmatrix.h
index dbb9d9d9d..dbb9d9d9d 100644
--- a/synlm/hhmm/rvtl/include/nl-fixedmatrix.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-fixedmatrix.h
diff --git a/synlm/hhmm/rvtl/include/nl-gauss.h b/contrib/synlm/hhmm/rvtl/include/nl-gauss.h
index a2213086f..a2213086f 100644
--- a/synlm/hhmm/rvtl/include/nl-gauss.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-gauss.h
diff --git a/synlm/hhmm/rvtl/include/nl-hash.h b/contrib/synlm/hhmm/rvtl/include/nl-hash.h
index 809284db9..809284db9 100644
--- a/synlm/hhmm/rvtl/include/nl-hash.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-hash.h
diff --git a/synlm/hhmm/rvtl/include/nl-heap.h b/contrib/synlm/hhmm/rvtl/include/nl-heap.h
index 31be6ed75..31be6ed75 100644
--- a/synlm/hhmm/rvtl/include/nl-heap.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-heap.h
diff --git a/synlm/hhmm/rvtl/include/nl-hmm.h b/contrib/synlm/hhmm/rvtl/include/nl-hmm.h
index 2f6cd0104..2f6cd0104 100644
--- a/synlm/hhmm/rvtl/include/nl-hmm.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-hmm.h
diff --git a/synlm/hhmm/rvtl/include/nl-hmm2.h b/contrib/synlm/hhmm/rvtl/include/nl-hmm2.h
index 711d589be..711d589be 100644
--- a/synlm/hhmm/rvtl/include/nl-hmm2.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-hmm2.h
diff --git a/synlm/hhmm/rvtl/include/nl-hmmloop.h b/contrib/synlm/hhmm/rvtl/include/nl-hmmloop.h
index c476b4271..c476b4271 100644
--- a/synlm/hhmm/rvtl/include/nl-hmmloop.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-hmmloop.h
diff --git a/synlm/hhmm/rvtl/include/nl-iomacros.h b/contrib/synlm/hhmm/rvtl/include/nl-iomacros.h
index ccb2eb095..ccb2eb095 100644
--- a/synlm/hhmm/rvtl/include/nl-iomacros.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-iomacros.h
diff --git a/synlm/hhmm/rvtl/include/nl-linsep.h b/contrib/synlm/hhmm/rvtl/include/nl-linsep.h
index ac3ef3312..ac3ef3312 100644
--- a/synlm/hhmm/rvtl/include/nl-linsep.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-linsep.h
diff --git a/synlm/hhmm/rvtl/include/nl-list.h b/contrib/synlm/hhmm/rvtl/include/nl-list.h
index 65f07f8b5..65f07f8b5 100644
--- a/synlm/hhmm/rvtl/include/nl-list.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-list.h
diff --git a/synlm/hhmm/rvtl/include/nl-matrix.h b/contrib/synlm/hhmm/rvtl/include/nl-matrix.h
index c8c18f8e1..c8c18f8e1 100644
--- a/synlm/hhmm/rvtl/include/nl-matrix.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-matrix.h
diff --git a/synlm/hhmm/rvtl/include/nl-mixture.h b/contrib/synlm/hhmm/rvtl/include/nl-mixture.h
index 2da5aacb2..2da5aacb2 100644
--- a/synlm/hhmm/rvtl/include/nl-mixture.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-mixture.h
diff --git a/synlm/hhmm/rvtl/include/nl-modelfile.h b/contrib/synlm/hhmm/rvtl/include/nl-modelfile.h
index dc6bec487..dc6bec487 100644
--- a/synlm/hhmm/rvtl/include/nl-modelfile.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-modelfile.h
diff --git a/synlm/hhmm/rvtl/include/nl-oblidtree.h b/contrib/synlm/hhmm/rvtl/include/nl-oblidtree.h
index 24c82e313..24c82e313 100644
--- a/synlm/hhmm/rvtl/include/nl-oblidtree.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-oblidtree.h
diff --git a/synlm/hhmm/rvtl/include/nl-prob.h b/contrib/synlm/hhmm/rvtl/include/nl-prob.h
index 76cf2fb57..76cf2fb57 100644
--- a/synlm/hhmm/rvtl/include/nl-prob.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-prob.h
diff --git a/synlm/hhmm/rvtl/include/nl-probmodel.h b/contrib/synlm/hhmm/rvtl/include/nl-probmodel.h
index 2dcff7b30..2dcff7b30 100644
--- a/synlm/hhmm/rvtl/include/nl-probmodel.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-probmodel.h
diff --git a/synlm/hhmm/rvtl/include/nl-racpt.h b/contrib/synlm/hhmm/rvtl/include/nl-racpt.h
index 5d1502f1f..5d1502f1f 100644
--- a/synlm/hhmm/rvtl/include/nl-racpt.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-racpt.h
diff --git a/synlm/hhmm/rvtl/include/nl-randvar.h b/contrib/synlm/hhmm/rvtl/include/nl-randvar.h
index 66cc0b8f2..66cc0b8f2 100644
--- a/synlm/hhmm/rvtl/include/nl-randvar.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-randvar.h
diff --git a/synlm/hhmm/rvtl/include/nl-refrv.h b/contrib/synlm/hhmm/rvtl/include/nl-refrv.h
index 1e4eca4fd..1e4eca4fd 100644
--- a/synlm/hhmm/rvtl/include/nl-refrv.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-refrv.h
diff --git a/synlm/hhmm/rvtl/include/nl-safeids.h b/contrib/synlm/hhmm/rvtl/include/nl-safeids.h
index 50837c366..50837c366 100644
--- a/synlm/hhmm/rvtl/include/nl-safeids.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-safeids.h
diff --git a/synlm/hhmm/rvtl/include/nl-stream.h b/contrib/synlm/hhmm/rvtl/include/nl-stream.h
index 8f743e12b..8f743e12b 100644
--- a/synlm/hhmm/rvtl/include/nl-stream.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-stream.h
diff --git a/synlm/hhmm/rvtl/include/nl-string.h b/contrib/synlm/hhmm/rvtl/include/nl-string.h
index ce3f68aa3..ce3f68aa3 100644
--- a/synlm/hhmm/rvtl/include/nl-string.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-string.h
diff --git a/synlm/hhmm/rvtl/include/nl-stringindex.h b/contrib/synlm/hhmm/rvtl/include/nl-stringindex.h
index 22931f081..22931f081 100644
--- a/synlm/hhmm/rvtl/include/nl-stringindex.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-stringindex.h
diff --git a/synlm/hhmm/rvtl/include/nl-tetrahex.h b/contrib/synlm/hhmm/rvtl/include/nl-tetrahex.h
index d77e4f471..d77e4f471 100644
--- a/synlm/hhmm/rvtl/include/nl-tetrahex.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-tetrahex.h
diff --git a/synlm/hhmm/rvtl/include/nl-timer.h b/contrib/synlm/hhmm/rvtl/include/nl-timer.h
index 3fa7c5387..3fa7c5387 100644
--- a/synlm/hhmm/rvtl/include/nl-timer.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-timer.h
diff --git a/synlm/hhmm/rvtl/include/nl-tree.h b/contrib/synlm/hhmm/rvtl/include/nl-tree.h
index 102c5c0b3..102c5c0b3 100644
--- a/synlm/hhmm/rvtl/include/nl-tree.h
+++ b/contrib/synlm/hhmm/rvtl/include/nl-tree.h
diff --git a/synlm/hhmm/wsjparse/include/HHMMLangModel-gf.h b/contrib/synlm/hhmm/wsjparse/include/HHMMLangModel-gf.h
index 3987d6969..3987d6969 100644
--- a/synlm/hhmm/wsjparse/include/HHMMLangModel-gf.h
+++ b/contrib/synlm/hhmm/wsjparse/include/HHMMLangModel-gf.h
diff --git a/synlm/hhmm/wsjparse/include/TextObsModel.h b/contrib/synlm/hhmm/wsjparse/include/TextObsModel.h
index 5e277e857..5e277e857 100644
--- a/synlm/hhmm/wsjparse/include/TextObsModel.h
+++ b/contrib/synlm/hhmm/wsjparse/include/TextObsModel.h
diff --git a/synlm/hhmm/wsjparse/include/TextObsVars.h b/contrib/synlm/hhmm/wsjparse/include/TextObsVars.h
index c32a6cbc1..c32a6cbc1 100644
--- a/synlm/hhmm/wsjparse/include/TextObsVars.h
+++ b/contrib/synlm/hhmm/wsjparse/include/TextObsVars.h
diff --git a/web/bin/daemon.pl b/contrib/web/bin/daemon.pl
index 8e6a08739..8e6a08739 100644
--- a/web/bin/daemon.pl
+++ b/contrib/web/bin/daemon.pl
diff --git a/web/bin/detokenizer.perl b/contrib/web/bin/detokenizer.perl
index 4d53aeddd..4d53aeddd 100644
--- a/web/bin/detokenizer.perl
+++ b/contrib/web/bin/detokenizer.perl
diff --git a/web/bin/nonbreaking_prefixes/nonbreaking_prefix.de b/contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.de
index c24f2080f..c24f2080f 100644
--- a/web/bin/nonbreaking_prefixes/nonbreaking_prefix.de
+++ b/contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.de
diff --git a/web/bin/nonbreaking_prefixes/nonbreaking_prefix.el b/contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.el
index 7bb3d490a..7bb3d490a 100644
--- a/web/bin/nonbreaking_prefixes/nonbreaking_prefix.el
+++ b/contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.el
diff --git a/web/bin/nonbreaking_prefixes/nonbreaking_prefix.en b/contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.en
index 7e7a8ce2e..7e7a8ce2e 100644
--- a/web/bin/nonbreaking_prefixes/nonbreaking_prefix.en
+++ b/contrib/web/bin/nonbreaking_prefixes/nonbreaking_prefix.en
diff --git a/web/bin/start-daemon-cluster.pl b/contrib/web/bin/start-daemon-cluster.pl
index 7bf87842c..7bf87842c 100644
--- a/web/bin/start-daemon-cluster.pl
+++ b/contrib/web/bin/start-daemon-cluster.pl
diff --git a/web/bin/tokenizer.perl b/contrib/web/bin/tokenizer.perl
index 4e1f5ac05..4e1f5ac05 100644
--- a/web/bin/tokenizer.perl
+++ b/contrib/web/bin/tokenizer.perl
diff --git a/web/index.cgi b/contrib/web/index.cgi
index dcc20b0aa..dcc20b0aa 100644
--- a/web/index.cgi
+++ b/contrib/web/index.cgi
diff --git a/web/index.js b/contrib/web/index.js
index 1d917ce73..1d917ce73 100644
--- a/web/index.js
+++ b/contrib/web/index.js
diff --git a/web/lib/RemoteProcess.pm b/contrib/web/lib/RemoteProcess.pm
index 1a414cd77..1a414cd77 100644
--- a/web/lib/RemoteProcess.pm
+++ b/contrib/web/lib/RemoteProcess.pm
diff --git a/web/lib/Subprocess.pm b/contrib/web/lib/Subprocess.pm
index adc7a853a..adc7a853a 100644
--- a/web/lib/Subprocess.pm
+++ b/contrib/web/lib/Subprocess.pm
diff --git a/web/translate.cgi b/contrib/web/translate.cgi
index ece7109fc..ece7109fc 100644
--- a/web/translate.cgi
+++ b/contrib/web/translate.cgi
diff --git a/cruise-control/config.ems b/cruise-control/config.ems
index e783b9dfb..eee15ec72 100644
--- a/cruise-control/config.ems
+++ b/cruise-control/config.ems
@@ -30,10 +30,10 @@ toy-data = $moses-script-dir/ems/example/data
### basic tools
#
# moses decoder
-decoder = $moses-src-dir/moses-cmd/src/moses
+decoder = $moses-src-dir/dist/bin/moses
# conversion of phrase table into binary on-disk format
-ttable-binarizer = $moses-src-dir/misc/processPhraseTable
+ttable-binarizer = $moses-src-dir/dist/bin/processPhraseTable
# conversion of rule table into binary on-disk format
#ttable-binarizer = "$moses-src-dir/CreateOnDisk/src/CreateOnDiskPt 1 1 5 100 2"
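
With this change the EMS configuration expects the decoder and the phrase-table binarizer in the single dist/bin directory produced by the bjam build, instead of the old per-directory autotools locations. A quick sanity check after building, where MOSES_SRC_DIR is a hypothetical shell stand-in for the config's $moses-src-dir:

    # Confirm the binaries referenced by config.ems exist.
    ls "$MOSES_SRC_DIR"/dist/bin/moses "$MOSES_SRC_DIR"/dist/bin/processPhraseTable
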
diff --git a/cruise-control/test_all_new_commits.sh b/cruise-control/test_all_new_commits.sh
index eec6d9d4b..c0039c7eb 100755
--- a/cruise-control/test_all_new_commits.sh
+++ b/cruise-control/test_all_new_commits.sh
@@ -85,30 +85,8 @@ function run_single_test () {
git checkout --force $commit 2>/dev/null || die "Failed to checkout commit $commit"
err=""
- echo "## regenerate-makefiles.sh" >> $longlog
- ./regenerate-makefiles.sh >> $longlog 2>&1 || err="regenerate-makefiles"
- echo "## make clean" >> $longlog
- make clean >> $longlog 2>&1 || warn "make clean failed, suspicious"
-
- echo "## ./configure $MCC_CONFIGURE_ARGS" >> $longlog
- if [ -z "$err" ]; then
- ./configure $MCC_CONFIGURE_ARGS >> $longlog 2>&1 || err="configure"
- fi
-
- echo "## make" >> $longlog
- if [ -z "$err" ]; then
- make >> $longlog 2>&1 || err="make"
- fi
-
- echo "## make scripts" >> $longlog
- cd scripts
- if [ -z "$err" ]; then
- make >> $longlog 2>&1 || err="make scripts"
- fi
- cd ..
-
- cd regression-testing
+ cd regression-testing
regtest_file=$(echo "$REGTEST_ARCHIVE" | sed 's/^.*\///')
# download data for regression tests if necessary
@@ -118,15 +96,22 @@ function run_single_test () {
tar xzf $regtest_file
touch $regtest_file.ok
fi
+ regtest_dir=$PWD/$(basename $regtest_file .tgz)
+ cd ..
+
+
+ echo "## ./bjam clean" >> $longlog
+ ./bjam clean $MCC_CONFIGURE_ARGS --with-regtest=$regtest_dir >> $longlog 2>&1 || warn "bjam clean failed, suspicious"
+ echo "## ./bjam $MCC_CONFIGURE_ARGS" >> $longlog
+ if [ -z "$err" ]; then
+ ./bjam $MCC_CONFIGURE_ARGS >> $longlog 2>&1 || err="bjam"
+ fi
+
echo "## regression tests" >> $longlog
if [ -z "$err" ]; then
- ./run-test-suite.perl &>> $longlog
- regtest_status=$?
- [ $regtest_status -eq 1 ] && die "Failed to run regression tests"
- [ $regtest_status -eq 2 ] && err="regression tests"
+ ./bjam $MCC_CONFIGURE_ARGS --with-regtest=$regtest_dir >> $longlog 2>&1 || err="regression tests"
fi
- cd ..
if [ -z "$err" ] && [ "$MCC_RUN_EMS" = "yes" ]; then
echo "## EMS" >> $longlog
@@ -139,8 +124,7 @@ function run_single_test () {
cd ..
touch giza-pp.ok
fi
- sed -i 's#^my \$BINDIR\s*=.*#my \$BINDIR="'$(pwd)/giza-pp/bin/'";#' \
- scripts/training/train-model.perl
+ ./bjam $MCC_CONFIGURE_ARGS --with-giza="$(pwd)/giza-pp/bin" || err="bjam with-giza"
srilm_dir=$(echo $MCC_CONFIGURE_ARGS | sed -r 's/.*--with-srilm=([^ ]+) .*/\1/')
mach_type=$($srilm_dir/sbin/machine-type)
mkdir -p "$WORKDIR/ems_workdir"
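
The cruise-control script now drives the whole build and regression run through bjam instead of regenerate-makefiles.sh, configure and make. A hand-run equivalent of the new flow, using the same variables the script uses (MCC_CONFIGURE_ARGS and regtest_dir come from the script's environment and are not fixed values):

    # Clean, build, then run the regression suite, as the script now does.
    ./bjam clean $MCC_CONFIGURE_ARGS --with-regtest=$regtest_dir
    ./bjam $MCC_CONFIGURE_ARGS
    ./bjam $MCC_CONFIGURE_ARGS --with-regtest=$regtest_dir

    # For the optional EMS step, GIZA++ is wired in through bjam as well.
    ./bjam $MCC_CONFIGURE_ARGS --with-giza="$(pwd)/giza-pp/bin"
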
diff --git a/depcomp b/depcomp
deleted file mode 100755
index 4c20c6c94..000000000
--- a/depcomp
+++ /dev/null
@@ -1,441 +0,0 @@
-#! /bin/sh
-
-# depcomp - compile a program generating dependencies as side-effects
-# Copyright 1999, 2000 Free Software Foundation, Inc.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2, or (at your option)
-# any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
-# 02111-1307, USA.
-
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
-
-if test -z "$depmode" || test -z "$source" || test -z "$object"; then
- echo "depcomp: Variables source, object and depmode must be set" 1>&2
- exit 1
-fi
-# `libtool' can also be set to `yes' or `no'.
-
-depfile=${depfile-`echo "$object" | sed 's,\([^/]*\)$,.deps/\1,;s/\.\([^.]*\)$/.P\1/'`}
-tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
-
-rm -f "$tmpdepfile"
-
-# Some modes work just like other modes, but use different flags. We
-# parameterize here, but still list the modes in the big case below,
-# to make depend.m4 easier to write. Note that we *cannot* use a case
-# here, because this file can only contain one case statement.
-if test "$depmode" = hp; then
- # HP compiler uses -M and no extra arg.
- gccflag=-M
- depmode=gcc
-fi
-
-if test "$depmode" = dashXmstdout; then
- # This is just like dashmstdout with a different argument.
- dashmflag=-xM
- depmode=dashmstdout
-fi
-
-case "$depmode" in
-gcc3)
-## gcc 3 implements dependency tracking that does exactly what
-## we want. Yay! Note: for some reason libtool 1.4 doesn't like
-## it if -MD -MP comes after the -MF stuff. Hmm.
- "$@" -MT "$object" -MD -MP -MF "$tmpdepfile"
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- mv "$tmpdepfile" "$depfile"
- ;;
-
-gcc)
-## There are various ways to get dependency output from gcc. Here's
-## why we pick this rather obscure method:
-## - Don't want to use -MD because we'd like the dependencies to end
-## up in a subdir. Having to rename by hand is ugly.
-## (We might end up doing this anyway to support other compilers.)
-## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
-## -MM, not -M (despite what the docs say).
-## - Using -M directly means running the compiler twice (even worse
-## than renaming).
- if test -z "$gccflag"; then
- gccflag=-MD,
- fi
- "$@" -Wp,"$gccflag$tmpdepfile"
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- rm -f "$depfile"
- echo "$object : \\" > "$depfile"
- alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
-## The second -e expression handles DOS-style file names with drive letters.
- sed -e 's/^[^:]*: / /' \
- -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
-## This next piece of magic avoids the `deleted header file' problem.
-## The problem is that when a header file which appears in a .P file
-## is deleted, the dependency causes make to die (because there is
-## typically no way to rebuild the header). We avoid this by adding
-## dummy dependencies for each header file. Too bad gcc doesn't do
-## this for us directly.
- tr ' ' '
-' < "$tmpdepfile" |
-## Some versions of gcc put a space before the `:'. On the theory
-## that the space means something, we add a space to the output as
-## well.
-## Some versions of the HPUX 10.20 sed can't process this invocation
-## correctly. Breaking it into two sed invocations is a workaround.
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-hp)
- # This case exists only to let depend.m4 do its work. It works by
- # looking at the text of this script. This case will never be run,
- # since it is checked for above.
- exit 1
- ;;
-
-sgi)
- if test "$libtool" = yes; then
- "$@" "-Wp,-MDupdate,$tmpdepfile"
- else
- "$@" -MDupdate "$tmpdepfile"
- fi
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- rm -f "$depfile"
-
- if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files
- echo "$object : \\" > "$depfile"
-
- # Clip off the initial element (the dependent). Don't try to be
- # clever and replace this with sed code, as IRIX sed won't handle
- # lines with more than a fixed number of characters (4096 in
- # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
- # the IRIX cc adds comments like `#:fec' to the end of the
- # dependency line.
- tr ' ' '
-' < "$tmpdepfile" \
- | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \
- tr '
-' ' ' >> $depfile
- echo >> $depfile
-
- # The second pass generates a dummy entry for each header file.
- tr ' ' '
-' < "$tmpdepfile" \
- | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
- >> $depfile
- else
- # The sourcefile does not contain any dependencies, so just
- # store a dummy comment line, to avoid errors with the Makefile
- # "include basename.Plo" scheme.
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile"
- ;;
-
-aix)
- # The C for AIX Compiler uses -M and outputs the dependencies
- # in a .u file. This file always lives in the current directory.
- # Also, the AIX compiler puts `$object:' at the start of each line;
- # $object doesn't have directory information.
- stripped=`echo "$object" | sed -e 's,^.*/,,' -e 's/\(.*\)\..*$/\1/'`
- tmpdepfile="$stripped.u"
- outname="$stripped.o"
- if test "$libtool" = yes; then
- "$@" -Wc,-M
- else
- "$@" -M
- fi
-
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
-
- if test -f "$tmpdepfile"; then
- # Each line is of the form `foo.o: dependent.h'.
- # Do two passes, one to just change these to
- # `$object: dependent.h' and one to simply `dependent.h:'.
- sed -e "s,^$outname:,$object :," < "$tmpdepfile" > "$depfile"
- sed -e "s,^$outname: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile"
- else
- # The sourcefile does not contain any dependencies, so just
- # store a dummy comment line, to avoid errors with the Makefile
- # "include basename.Plo" scheme.
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile"
- ;;
-
-icc)
- # Must come before tru64.
-
- # Intel's C compiler understands `-MD -MF file'. However
- # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c
- # will fill foo.d with something like
- # foo.o: sub/foo.c
- # foo.o: sub/foo.h
- # which is wrong. We want:
- # sub/foo.o: sub/foo.c
- # sub/foo.o: sub/foo.h
- # sub/foo.c:
- # sub/foo.h:
-
- "$@" -MD -MF "$tmpdepfile"
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- rm -f "$depfile"
- # Each line is of the form `foo.o: dependent.h'.
- # Do two passes, one to just change these to
- # `$object: dependent.h' and one to simply `dependent.h:'.
- sed -e "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
- sed -e "s,^[^:]*: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-tru64)
- # The Tru64 AIX compiler uses -MD to generate dependencies as a side
- # effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'.
- # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
- # dependencies in `foo.d' instead, so we check for that too.
- # Subdirectories are respected.
-
- tmpdepfile1="$object.d"
- tmpdepfile2=`echo "$object" | sed -e 's/.o$/.d/'`
- if test "$libtool" = yes; then
- "$@" -Wc,-MD
- else
- "$@" -MD
- fi
-
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile1" "$tmpdepfile2"
- exit $stat
- fi
-
- if test -f "$tmpdepfile1"; then
- tmpdepfile="$tmpdepfile1"
- else
- tmpdepfile="$tmpdepfile2"
- fi
- if test -f "$tmpdepfile"; then
- sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
- # That's a space and a tab in the [].
- sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
- else
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile"
- ;;
-
-#nosideeffect)
- # This comment above is used by automake to tell side-effect
- # dependency tracking mechanisms from slower ones.
-
-dashmstdout)
- # Important note: in order to support this mode, a compiler *must*
- # always write the proprocessed file to stdout, regardless of -o,
- # because we must use -o when running libtool.
- test -z "$dashmflag" && dashmflag=-M
- ( IFS=" "
- case " $* " in
- *" --mode=compile "*) # this is libtool, let us make it quiet
- for arg
- do # cycle over the arguments
- case "$arg" in
- "--mode=compile")
- # insert --quiet before "--mode=compile"
- set fnord "$@" --quiet
- shift # fnord
- ;;
- esac
- set fnord "$@" "$arg"
- shift # fnord
- shift # "$arg"
- done
- ;;
- esac
- "$@" $dashmflag | sed 's:^[^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile"
- ) &
- proc=$!
- "$@"
- stat=$?
- wait "$proc"
- if test "$stat" != 0; then exit $stat; fi
- rm -f "$depfile"
- cat < "$tmpdepfile" > "$depfile"
- tr ' ' '
-' < "$tmpdepfile" | \
-## Some versions of the HPUX 10.20 sed can't process this invocation
-## correctly. Breaking it into two sed invocations is a workaround.
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-dashXmstdout)
- # This case only exists to satisfy depend.m4. It is never actually
- # run, as this mode is specially recognized in the preamble.
- exit 1
- ;;
-
-makedepend)
- # X makedepend
- (
- shift
- cleared=no
- for arg in "$@"; do
- case $cleared in no)
- set ""; shift
- cleared=yes
- esac
- case "$arg" in
- -D*|-I*)
- set fnord "$@" "$arg"; shift;;
- -*)
- ;;
- *)
- set fnord "$@" "$arg"; shift;;
- esac
- done
- obj_suffix="`echo $object | sed 's/^.*\././'`"
- touch "$tmpdepfile"
- ${MAKEDEPEND-makedepend} 2>/dev/null -o"$obj_suffix" -f"$tmpdepfile" "$@"
- ) &
- proc=$!
- "$@"
- stat=$?
- wait "$proc"
- if test "$stat" != 0; then exit $stat; fi
- rm -f "$depfile"
- cat < "$tmpdepfile" > "$depfile"
- tail +3 "$tmpdepfile" | tr ' ' '
-' | \
-## Some versions of the HPUX 10.20 sed can't process this invocation
-## correctly. Breaking it into two sed invocations is a workaround.
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile" "$tmpdepfile".bak
- ;;
-
-cpp)
- # Important note: in order to support this mode, a compiler *must*
- # always write the proprocessed file to stdout, regardless of -o,
- # because we must use -o when running libtool.
- ( IFS=" "
- case " $* " in
- *" --mode=compile "*)
- for arg
- do # cycle over the arguments
- case $arg in
- "--mode=compile")
- # insert --quiet before "--mode=compile"
- set fnord "$@" --quiet
- shift # fnord
- ;;
- esac
- set fnord "$@" "$arg"
- shift # fnord
- shift # "$arg"
- done
- ;;
- esac
- "$@" -E |
- sed -n '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' |
- sed '$ s: \\$::' > "$tmpdepfile"
- ) &
- proc=$!
- "$@"
- stat=$?
- wait "$proc"
- if test "$stat" != 0; then exit $stat; fi
- rm -f "$depfile"
- echo "$object : \\" > "$depfile"
- cat < "$tmpdepfile" >> "$depfile"
- sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-msvisualcpp)
- # Important note: in order to support this mode, a compiler *must*
- # always write the proprocessed file to stdout, regardless of -o,
- # because we must use -o when running libtool.
- ( IFS=" "
- case " $* " in
- *" --mode=compile "*)
- for arg
- do # cycle over the arguments
- case $arg in
- "--mode=compile")
- # insert --quiet before "--mode=compile"
- set fnord "$@" --quiet
- shift # fnord
- ;;
- esac
- set fnord "$@" "$arg"
- shift # fnord
- shift # "$arg"
- done
- ;;
- esac
- "$@" -E |
- sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::echo "`cygpath -u \\"\1\\"`":p' | sort | uniq > "$tmpdepfile"
- ) &
- proc=$!
- "$@"
- stat=$?
- wait "$proc"
- if test "$stat" != 0; then exit $stat; fi
- rm -f "$depfile"
- echo "$object : \\" > "$depfile"
- . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile"
- echo " " >> "$depfile"
- . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::\1\::p' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-none)
- exec "$@"
- ;;
-
-*)
- echo "Unknown depmode $depmode" 1>&2
- exit 1
- ;;
-esac
-
-exit 0
diff --git a/install-sh b/install-sh
deleted file mode 100755
index 36f96f3e0..000000000
--- a/install-sh
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/bin/sh
-#
-# install - install a program, script, or datafile
-# This comes from X11R5 (mit/util/scripts/install.sh).
-#
-# Copyright 1991 by the Massachusetts Institute of Technology
-#
-# Permission to use, copy, modify, distribute, and sell this software and its
-# documentation for any purpose is hereby granted without fee, provided that
-# the above copyright notice appear in all copies and that both that
-# copyright notice and this permission notice appear in supporting
-# documentation, and that the name of M.I.T. not be used in advertising or
-# publicity pertaining to distribution of the software without specific,
-# written prior permission. M.I.T. makes no representations about the
-# suitability of this software for any purpose. It is provided "as is"
-# without express or implied warranty.
-#
-# Calling this script install-sh is preferred over install.sh, to prevent
-# `make' implicit rules from creating a file called install from it
-# when there is no Makefile.
-#
-# This script is compatible with the BSD install script, but was written
-# from scratch. It can only install one file at a time, a restriction
-# shared with many OS's install programs.
-
-
-# set DOITPROG to echo to test this script
-
-# Don't use :- since 4.3BSD and earlier shells don't like it.
-doit="${DOITPROG-}"
-
-
-# put in absolute paths if you don't have them in your path; or use env. vars.
-
-mvprog="${MVPROG-mv}"
-cpprog="${CPPROG-cp}"
-chmodprog="${CHMODPROG-chmod}"
-chownprog="${CHOWNPROG-chown}"
-chgrpprog="${CHGRPPROG-chgrp}"
-stripprog="${STRIPPROG-strip}"
-rmprog="${RMPROG-rm}"
-mkdirprog="${MKDIRPROG-mkdir}"
-
-transformbasename=""
-transform_arg=""
-instcmd="$mvprog"
-chmodcmd="$chmodprog 0755"
-chowncmd=""
-chgrpcmd=""
-stripcmd=""
-rmcmd="$rmprog -f"
-mvcmd="$mvprog"
-src=""
-dst=""
-dir_arg=""
-
-while [ x"$1" != x ]; do
- case $1 in
- -c) instcmd=$cpprog
- shift
- continue;;
-
- -d) dir_arg=true
- shift
- continue;;
-
- -m) chmodcmd="$chmodprog $2"
- shift
- shift
- continue;;
-
- -o) chowncmd="$chownprog $2"
- shift
- shift
- continue;;
-
- -g) chgrpcmd="$chgrpprog $2"
- shift
- shift
- continue;;
-
- -s) stripcmd=$stripprog
- shift
- continue;;
-
- -t=*) transformarg=`echo $1 | sed 's/-t=//'`
- shift
- continue;;
-
- -b=*) transformbasename=`echo $1 | sed 's/-b=//'`
- shift
- continue;;
-
- *) if [ x"$src" = x ]
- then
- src=$1
- else
- # this colon is to work around a 386BSD /bin/sh bug
- :
- dst=$1
- fi
- shift
- continue;;
- esac
-done
-
-if [ x"$src" = x ]
-then
- echo "$0: no input file specified" >&2
- exit 1
-else
- :
-fi
-
-if [ x"$dir_arg" != x ]; then
- dst=$src
- src=""
-
- if [ -d "$dst" ]; then
- instcmd=:
- chmodcmd=""
- else
- instcmd=$mkdirprog
- fi
-else
-
-# Waiting for this to be detected by the "$instcmd $src $dsttmp" command
-# might cause directories to be created, which would be especially bad
-# if $src (and thus $dsttmp) contains '*'.
-
- if [ -f "$src" ] || [ -d "$src" ]
- then
- :
- else
- echo "$0: $src does not exist" >&2
- exit 1
- fi
-
- if [ x"$dst" = x ]
- then
- echo "$0: no destination specified" >&2
- exit 1
- else
- :
- fi
-
-# If destination is a directory, append the input filename; if your system
-# does not like double slashes in filenames, you may need to add some logic
-
- if [ -d "$dst" ]
- then
- dst=$dst/`basename "$src"`
- else
- :
- fi
-fi
-
-## this sed command emulates the dirname command
-dstdir=`echo "$dst" | sed -e 's,[^/]*$,,;s,/$,,;s,^$,.,'`
-
-# Make sure that the destination directory exists.
-# this part is taken from Noah Friedman's mkinstalldirs script
-
-# Skip lots of stat calls in the usual case.
-if [ ! -d "$dstdir" ]; then
-defaultIFS='
- '
-IFS="${IFS-$defaultIFS}"
-
-oIFS=$IFS
-# Some sh's can't handle IFS=/ for some reason.
-IFS='%'
-set - `echo "$dstdir" | sed -e 's@/@%@g' -e 's@^%@/@'`
-IFS=$oIFS
-
-pathcomp=''
-
-while [ $# -ne 0 ] ; do
- pathcomp=$pathcomp$1
- shift
-
- if [ ! -d "$pathcomp" ] ;
- then
- $mkdirprog "$pathcomp"
- else
- :
- fi
-
- pathcomp=$pathcomp/
-done
-fi
-
-if [ x"$dir_arg" != x ]
-then
- $doit $instcmd "$dst" &&
-
- if [ x"$chowncmd" != x ]; then $doit $chowncmd "$dst"; else : ; fi &&
- if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd "$dst"; else : ; fi &&
- if [ x"$stripcmd" != x ]; then $doit $stripcmd "$dst"; else : ; fi &&
- if [ x"$chmodcmd" != x ]; then $doit $chmodcmd "$dst"; else : ; fi
-else
-
-# If we're going to rename the final executable, determine the name now.
-
- if [ x"$transformarg" = x ]
- then
- dstfile=`basename "$dst"`
- else
- dstfile=`basename "$dst" $transformbasename |
- sed $transformarg`$transformbasename
- fi
-
-# don't allow the sed command to completely eliminate the filename
-
- if [ x"$dstfile" = x ]
- then
- dstfile=`basename "$dst"`
- else
- :
- fi
-
-# Make a couple of temp file names in the proper directory.
-
- dsttmp=$dstdir/#inst.$$#
- rmtmp=$dstdir/#rm.$$#
-
-# Trap to clean up temp files at exit.
-
- trap 'status=$?; rm -f "$dsttmp" "$rmtmp" && exit $status' 0
- trap '(exit $?); exit' 1 2 13 15
-
-# Move or copy the file name to the temp name
-
- $doit $instcmd "$src" "$dsttmp" &&
-
-# and set any options; do chmod last to preserve setuid bits
-
-# If any of these fail, we abort the whole thing. If we want to
-# ignore errors from any of these, just make sure not to ignore
-# errors from the above "$doit $instcmd $src $dsttmp" command.
-
- if [ x"$chowncmd" != x ]; then $doit $chowncmd "$dsttmp"; else :;fi &&
- if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd "$dsttmp"; else :;fi &&
- if [ x"$stripcmd" != x ]; then $doit $stripcmd "$dsttmp"; else :;fi &&
- if [ x"$chmodcmd" != x ]; then $doit $chmodcmd "$dsttmp"; else :;fi &&
-
-# Now remove or move aside any old file at destination location. We try this
-# two ways since rm can't unlink itself on some systems and the destination
-# file might be busy for other reasons. In this case, the final cleanup
-# might fail but the new file should still install successfully.
-
-{
- if [ -f "$dstdir/$dstfile" ]
- then
- $doit $rmcmd -f "$dstdir/$dstfile" 2>/dev/null ||
- $doit $mvcmd -f "$dstdir/$dstfile" "$rmtmp" 2>/dev/null ||
- {
- echo "$0: cannot unlink or rename $dstdir/$dstfile" >&2
- (exit 1); exit
- }
- else
- :
- fi
-} &&
-
-# Now rename the file to the real destination.
-
- $doit $mvcmd "$dsttmp" "$dstdir/$dstfile"
-
-fi &&
-
-# The final little trick to "correctly" pass the exit status to the exit trap.
-
-{
- (exit 0); exit
-}
diff --git a/jam-files/LICENSE_1_0.txt b/jam-files/LICENSE_1_0.txt
new file mode 100644
index 000000000..36b7cd93c
--- /dev/null
+++ b/jam-files/LICENSE_1_0.txt
@@ -0,0 +1,23 @@
+Boost Software License - Version 1.0 - August 17th, 2003
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of the software and accompanying documentation covered by
+this license (the "Software") to use, reproduce, display, distribute,
+execute, and transmit the Software, and to prepare derivative works of the
+Software, and to permit third-parties to whom the Software is furnished to
+do so, all subject to the following:
+
+The copyright notices in the Software and this entire statement, including
+the above license grant, this restriction and the following disclaimer,
+must be included in all copies of the Software, in whole or in part, and
+all derivative works of the Software, unless such copies or derivative
+works are solely in the form of machine-executable object code generated by
+a source language processor.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/jam-files/boost-build/boost-build.jam b/jam-files/boost-build/boost-build.jam
new file mode 100644
index 000000000..73db0497b
--- /dev/null
+++ b/jam-files/boost-build/boost-build.jam
@@ -0,0 +1,8 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+boost-build kernel ;
diff --git a/jam-files/boost-build/bootstrap.jam b/jam-files/boost-build/bootstrap.jam
new file mode 100644
index 000000000..af3e8bf50
--- /dev/null
+++ b/jam-files/boost-build/bootstrap.jam
@@ -0,0 +1,18 @@
+# Copyright (c) 2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file handles the initial phase of Boost.Build loading.
+# Boost.Jam has already figured out where Boost.Build is
+# and loads this file, which is responsible for initializing
+# basic facilities such as the module system and for loading the
+# main Boost.Build module, build-system.jam.
+#
+# The exact operation of this module is not interesting; it makes
+# sense to look at build-system.jam right away.
+
+# Load the kernel/bootstrap.jam, which does all the work.
+.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
+include $(.bootstrap-file) ;
\ No newline at end of file
diff --git a/jam-files/boost-build/build-system.jam b/jam-files/boost-build/build-system.jam
new file mode 100644
index 000000000..9f9c884cc
--- /dev/null
+++ b/jam-files/boost-build/build-system.jam
@@ -0,0 +1,1008 @@
+# Copyright 2003, 2005, 2007 Dave Abrahams
+# Copyright 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is part of Boost Build version 2. You can think of it as forming the
+# main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
+
+import build-request ;
+import builtin ;
+import "class" : new ;
+import errors ;
+import feature ;
+import make ;
+import modules ;
+import os ;
+import path ;
+import project ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import utility ;
+import version ;
+import virtual-target ;
+import generators ;
+import configure ;
+
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Shortcut used in this module for accessing used command-line parameters.
+.argv = [ modules.peek : ARGV ] ;
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
+
+# Legacy option doing too many things, some of which are not even documented.
+# Should be phased out.
+# * Disables loading site and user configuration files.
+# * Disables auto-configuration for toolsets specified explicitly on the
+# command-line.
+# * Causes --toolset command-line options to be ignored.
+# * Prevents the default toolset from being used even if no toolset has been
+# configured at all.
+.legacy-ignore-config = [ MATCH ^(--ignore-config)$ : $(.argv) ] ;
+
+# Cleaning is tricky. Say the user runs 'bjam --clean foo' where 'foo' is a
+# directory: then we want to clean targets which are in 'foo', as well as those
+# in any child Jamfiles under 'foo', but not those in any unrelated Jamfiles. To
+# achieve this we collect a list of projects under which cleaning is allowed.
+.project-targets = ;
+
+# Virtual targets obtained when building the main targets referenced on the
+# command line. When running 'bjam --clean main_target' we want to clean only
+# files belonging to that main target, so we need to record which targets are
+# produced for it.
+.results-of-main-targets = ;
+
+# Was an XML dump requested?
+.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+.default-toolset = ;
+.default-toolset-version = ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+rule command-line-free-features ( )
+{
+ return $(.command-line-free-features) ;
+}
+
+
+# Returns the location of the build system. The primary use case is building
+# Boost where it is sometimes needed to get the location of other components
+# (e.g. BoostBook files) and it is convenient to use locations relative to the
+# Boost Build path.
+#
+rule location ( )
+{
+ local r = [ modules.binding build-system ] ;
+ return $(r:P) ;
+}
+
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+rule set-default-toolset ( toolset : version ? )
+{
+ .default-toolset = $(toolset) ;
+ .default-toolset-version = $(version) ;
+}
+
+rule set-pre-build-hook ( function )
+{
+ .pre-build-hook = $(function) ;
+}
+
+rule set-post-build-hook ( function )
+{
+ .post-build-hook = $(function) ;
+}
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+local rule actual-clean-targets ( )
+{
+ # Construct a list of projects explicitly detected as targets on this build
+ # system run. These are the projects under which cleaning is allowed.
+ for local t in $(targets)
+ {
+ if [ class.is-a $(t) : project-target ]
+ {
+ .project-targets += [ $(t).project-module ] ;
+ }
+ }
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ local targets-to-clean ;
+ for local t in $(.results-of-main-targets)
+ {
+ # Do not include roots or sources.
+ targets-to-clean += [ virtual-target.traverse $(t) ] ;
+ }
+ targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
+
+ local to-clean ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local p = [ $(t).project ] ;
+
+ # Remove only derived targets.
+ if [ $(t).action ]
+ {
+ if $(t) in $(targets-to-clean) ||
+ [ should-clean-project [ $(p).project-module ] ] = true
+ {
+ to-clean += $(t) ;
+ }
+ }
+ }
+
+ local to-clean-actual ;
+ for local t in $(to-clean)
+ {
+ to-clean-actual += [ $(t).actualize ] ;
+ }
+ return $(to-clean-actual) ;
+}
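
As the cleaning comments near the top of this file explain, a clean request is scoped to the projects and main targets named on the command line; this rule computes exactly that set. Illustrative invocations ('foo' is the placeholder directory name used in those comments):

    # Clean derived targets in the current project and its children.
    ./bjam --clean

    # Clean only targets built under the directory/project 'foo'.
    ./bjam --clean foo
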
+
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can not reuse that code without a
+# project-targets instance.
+#
+local rule find-target ( target-id )
+{
+ local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
+
+ local pm ;
+ if $(split)
+ {
+ pm = [ project.find $(split[1]) : "." ] ;
+ }
+ else
+ {
+ pm = [ project.find $(target-id) : "." ] ;
+ }
+
+ local result ;
+ if $(pm)
+ {
+ result = [ project.target $(pm) ] ;
+ }
+
+ if $(split)
+ {
+ result = [ $(result).find $(split[2]) ] ;
+ }
+
+ return $(result) ;
+}
+
+
+# Initializes a new configuration module.
+#
+local rule initialize-config-module ( module-name : location ? )
+{
+ project.initialize $(module-name) : $(location) ;
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+local rule load-config ( module-name : filename : path + : must-find ? )
+{
+ if $(.debug-config)
+ {
+ ECHO "notice: Searching" "$(path)" "for" "$(module-name)"
+ "configuration file" "$(filename)" "." ;
+ }
+ local where = [ GLOB $(path) : $(filename) ] ;
+ if $(where)
+ {
+ where = [ NORMALIZE_PATH $(where[1]) ] ;
+ if $(.debug-config)
+ {
+ ECHO "notice: Loading" "$(module-name)" "configuration file"
+ "$(filename)" "from" $(where) "." ;
+ }
+
+ # Set source location so that path-constant in config files
+ # with relative paths work. This is of most importance
+ # for project-config.jam, but may be used in other
+ # config files as well.
+ local attributes = [ project.attributes $(module-name) ] ;
+ $(attributes).set source-location : $(where:D) : exact ;
+ modules.load $(module-name) : $(filename) : $(path) ;
+ project.load-used-projects $(module-name) ;
+ }
+ else
+ {
+ if $(must-find)
+ {
+ errors.user-error "Configuration file" "$(filename)" "not found in"
+ "$(path)" "." ;
+ }
+ if $(.debug-config)
+ {
+ ECHO "notice:" "Configuration file" "$(filename)" "not found in"
+ "$(path)" "." ;
+ }
+ }
+ return $(where) ;
+}
+
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if specified.
+# If this configuration file is loaded, regular site and user configuration
+# files will not be. If a relative path is specified, file is searched for in
+# the current folder.
+#
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
+# path, in that order. Not loaded in case the test-config configuration file is
+# loaded or either the --ignore-site-config or the --ignore-config command-line
+# option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded, --ignore-config
+# command-line option is specified or an empty file name is explicitly
+# specified. If the file name has been given explicitly then the file must
+# exist.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
+local rule load-configuration-files
+{
+ # Flag indicating that site configuration should not be loaded.
+ local ignore-site-config =
+ [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
+
+ if $(.legacy-ignore-config) && $(.debug-config)
+ {
+ ECHO "notice: Regular site and user configuration files will be ignored" ;
+ ECHO "notice: due to the --ignore-config command-line option." ;
+ }
+
+ initialize-config-module test-config ;
+ local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
+ local uq = [ MATCH \"(.*)\" : $(test-config) ] ;
+ if $(uq)
+ {
+ test-config = $(uq) ;
+ }
+ if $(test-config)
+ {
+ local where =
+ [ load-config test-config : $(test-config:BS) : $(test-config:D) ] ;
+ if $(where)
+ {
+ if $(.debug-config) && ! $(.legacy-ignore-config)
+ {
+ ECHO "notice: Regular site and user configuration files will" ;
+ ECHO "notice: be ignored due to the test configuration being"
+ "loaded." ;
+ }
+ }
+ else
+ {
+ test-config = ;
+ }
+ }
+
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local site-path = /etc $(user-path) ;
+ if [ os.name ] in NT CYGWIN
+ {
+ site-path = [ modules.peek : SystemRoot ] $(user-path) ;
+ }
+
+    if $(ignore-site-config) && ! $(.legacy-ignore-config)
+ {
+ ECHO "notice: Site configuration files will be ignored due to the" ;
+ ECHO "notice: --ignore-site-config command-line option." ;
+ }
+
+ initialize-config-module site-config ;
+ if ! $(test-config) && ! $(ignore-site-config) && ! $(.legacy-ignore-config)
+ {
+ load-config site-config : site-config.jam : $(site-path) ;
+ }
+
+ initialize-config-module user-config ;
+ if ! $(test-config) && ! $(.legacy-ignore-config)
+ {
+ local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
+ user-config = $(user-config[-1]) ;
+ user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ user-config = [ utility.unquote $(user-config) ] ;
+ local explicitly-requested = $(user-config) ;
+ user-config ?= user-config.jam ;
+
+ if $(user-config)
+ {
+ if $(explicitly-requested)
+ {
+ # Treat explicitly entered user paths as native OS path
+ # references and, if non-absolute, root them at the current
+ # working directory.
+ user-config = [ path.make $(user-config) ] ;
+ user-config = [ path.root $(user-config) [ path.pwd ] ] ;
+ user-config = [ path.native $(user-config) ] ;
+
+ if $(.debug-config)
+ {
+ ECHO "notice: Loading explicitly specified user"
+ "configuration file:" ;
+ ECHO " $(user-config)" ;
+ }
+
+ load-config user-config : $(user-config:BS) : $(user-config:D)
+ : must-exist ;
+ }
+ else
+ {
+ load-config user-config : $(user-config) : $(user-path) ;
+ }
+ }
+ else if $(.debug-config)
+ {
+ ECHO "notice: User configuration file loading explicitly disabled." ;
+ }
+ }
+
+ # We look for project-config.jam from "." upward.
+    # I am not sure this is 100% the right decision; we might as well check
+    # for it only alongside the Jamroot file. However:
+ #
+ # - We need to load project-root.jam before Jamroot
+ # - We probably would need to load project-root.jam even if there's no
+ # Jamroot - e.g. to implement automake-style out-of-tree builds.
+ local file = [ path.glob "." : project-config.jam ] ;
+ if ! $(file)
+ {
+ file = [ path.glob-in-parents "." : project-config.jam ] ;
+ }
+ if $(file)
+ {
+ initialize-config-module project-config : $(file:D) ;
+ load-config project-config : project-config.jam : $(file:D) ;
+ }
+}
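+
+# For illustration only: a minimal user-config.jam picked up by the search
+# above might contain nothing more than toolset declarations, e.g.:
+#
+#   using gcc : 4.4 : g++-4.4 ;
+#   using msvc ;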
+
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
+local rule process-explicit-toolset-requests
+{
+ local extra-properties ;
+
+ local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
+ local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
+
+ for local t in $(option-toolsets) $(feature-toolsets)
+ {
+ # Parse toolset-version/properties.
+ local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
+ local toolset-version = $((t-v,t,v)[1]) ;
+ local toolset = $((t-v,t,v)[2]) ;
+ local version = $((t-v,t,v)[3]) ;
+
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] Detected command-line request for
+ $(toolset-version): "toolset=" $(toolset) "version="
+ $(version) ;
+ }
+
+ # If the toolset is not known, configure it now.
+ local known ;
+ if $(toolset) in [ feature.values <toolset> ]
+ {
+ known = true ;
+ }
+ if $(known) && $(version) && ! [ feature.is-subvalue toolset
+ : $(toolset) : version : $(version) ]
+ {
+ known = ;
+ }
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if ! $(known)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice: [cmdline-cfg] toolset $(toolset-version) not"
+ "previously configured; attempting to auto-configure now" ;
+ }
+ toolset.using $(toolset) : $(version) ;
+ }
+ else
+ {
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] toolset $(toolset-version) already
+ configured ;
+ }
+ }
+
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if ! $(t) in $(.argv) && ! $(t) in $(feature-toolsets)
+ {
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] adding toolset=$(t) to the build
+ request. ;
+ }
+ extra-properties += toolset=$(t) ;
+ }
+ }
+
+ return $(extra-properties) ;
+}
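+
+# For example (an illustrative invocation, not tied to any particular
+# project), running
+#
+#   bjam --toolset=gcc-4.4,msvc
+#
+# would auto-configure whichever of the two toolsets has not been configured
+# yet and add toolset=gcc-4.4 and toolset=msvc to the build request.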
+
+
+# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
+# child of any of the projects requested to be cleaned in this build system run.
+# Returns 'false' otherwise. Expects the .project-targets list to have already
+# been constructed.
+#
+local rule should-clean-project ( project )
+{
+ if ! $(.should-clean-project.$(project))
+ {
+ local r = false ;
+ if $(project) in $(.project-targets)
+ {
+ r = true ;
+ }
+ else
+ {
+ local parent = [ project.attribute $(project) parent-module ] ;
+ if $(parent) && $(parent) != user-config
+ {
+ r = [ should-clean-project $(parent) ] ;
+ }
+ }
+ .should-clean-project.$(project) = $(r) ;
+ }
+
+ return $(.should-clean-project.$(project)) ;
+}
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+{
+ if --version in $(.argv)
+ {
+ version.print ;
+ EXIT ;
+ }
+
+ version.verify-engine-version ;
+
+ load-configuration-files ;
+
+ local extra-properties ;
+ # Note that this causes --toolset options to be ignored if --ignore-config
+ # is specified.
+ if ! $(.legacy-ignore-config)
+ {
+ extra-properties = [ process-explicit-toolset-requests ] ;
+ }
+
+
+ # We always load project in "." so that 'use-project' directives have any
+ # chance of being seen. Otherwise, we would not be able to refer to
+ # subprojects using target ids.
+ local current-project ;
+ if [ project.find "." : "." ]
+ {
+ current-project = [ project.target [ project.load "." ] ] ;
+ }
+
+
+    # In case there are no toolsets currently defined, make the build run
+    # using the default toolset.
+ if ! $(.legacy-ignore-config) && ! [ feature.values <toolset> ]
+ {
+ local default-toolset = $(.default-toolset) ;
+ local default-toolset-version = ;
+ if $(default-toolset)
+ {
+ default-toolset-version = $(.default-toolset-version) ;
+ }
+ else
+ {
+ default-toolset = gcc ;
+ if [ os.name ] = NT
+ {
+ default-toolset = msvc ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ default-toolset = darwin ;
+ }
+ }
+
+ ECHO "warning: No toolsets are configured." ;
+ ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
+ ECHO "warning: If the default is wrong, your build may not work correctly." ;
+ ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ;
+ ECHO "warning: For more configuration options, please consult" ;
+ ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+
+ toolset.using $(default-toolset) : $(default-toolset-version) ;
+ }
+
+
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ local build-request = [ build-request.from-command-line $(.argv)
+ $(extra-properties) ] ;
+ local target-ids = [ $(build-request).get-at 1 ] ;
+ local properties = [ $(build-request).get-at 2 ] ;
+
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ if $(properties)
+ {
+ expanded = [ build-request.expand-no-defaults $(properties) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
+ }
+ else
+ {
+ expanded = [ property-set.empty ] ;
+ }
+
+
+ # Check that we actually found something to build.
+ if ! $(current-project) && ! $(target-ids)
+ {
+ errors.user-error "error: no Jamfile in current directory found, and no"
+ "target references specified." ;
+ EXIT ;
+ }
+
+
+    # Flags indicating that this build system run has been started in order
+    # to clean existing targets instead of creating new ones. Note that these
+    # are not the final flag values as they may get changed later on due to
+    # some special targets being specified on the command line.
+ local clean ; if "--clean" in $(.argv) { clean = true ; }
+ local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
+
+
+    # List of explicitly requested files to build. Any target reference read
+    # from the command line that is not recognized as one of the targets
+    # defined in the loaded Jamfiles will be interpreted as an explicitly
+    # requested file to build. If any such files are explicitly requested then
+    # only those files and the targets they depend on will be built, and they
+    # will be searched for among targets that would have been built had there
+    # been no explicitly requested files.
+    local explicitly-requested-files ;
+
+
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ local targets ;
+ local virtual-targets ;
+ local actual-targets ;
+
+
+    # Process each target specified on the command line and convert it into an
+    # internal Boost Build target object. Detect the special 'clean' target. If
+    # no main Boost Build targets were explicitly requested, use the current
+    # project as the target.
+ for local id in $(target-ids)
+ {
+ if $(id) = clean
+ {
+ clean = true ;
+ }
+ else
+ {
+ local t ;
+ if $(current-project)
+ {
+ t = [ $(current-project).find $(id) : no-error ] ;
+ }
+ else
+ {
+ t = [ find-target $(id) ] ;
+ }
+
+ if ! $(t)
+ {
+ ECHO "notice: could not find main target" $(id) ;
+ ECHO "notice: assuming it is a name of file to create." ;
+ explicitly-requested-files += $(id) ;
+ }
+ else
+ {
+ targets += $(t) ;
+ }
+ }
+ }
+ if ! $(targets)
+ {
+ targets += [ project.target [ project.module-name "." ] ] ;
+ }
+
+ if [ option.get dump-generators : : true ]
+ {
+ generators.dump ;
+ }
+
+    # We wish to put config.log in the build directory corresponding to
+    # Jamroot, so that its location does not differ depending on the directory
+    # from which we run the build. The amount of indirection necessary here is
+    # scary.
+ local first-project = [ $(targets[0]).project ] ;
+ local first-project-root-location = [ $(first-project).get project-root ] ;
+ local first-project-root-module = [ project.load $(first-project-root-location) ] ;
+ local first-project-root = [ project.target $(first-project-root-module) ] ;
+ local first-build-build-dir = [ $(first-project-root).build-dir ] ;
+ configure.set-log-file $(first-build-build-dir)/config.log ;
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
+ for local p in $(expanded)
+ {
+ .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
+ for local t in $(targets)
+ {
+ local g = [ $(t).generate $(p) ] ;
+ if ! [ class.is-a $(t) : project-target ]
+ {
+ .results-of-main-targets += $(g[2-]) ;
+ }
+ virtual-targets += $(g[2-]) ;
+ }
+ }
+
+
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in $(virtual-targets)
+ {
+ actual-targets += [ $(t).actualize ] ;
+ }
+
+
+    # If XML data output has been requested, prepare additional rules and
+    # targets so we can hook into Jam to collect build data while it is
+    # building and have it trigger the final XML report generation after all
+    # the planned targets have been built.
+ if $(.out-xml)
+ {
+ # Get a qualified virtual target name.
+ rule full-target-name ( target )
+ {
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+ local project-path = [ $(project).get location ] ;
+ return $(project-path)//$(name) ;
+ }
+
+ # Generate an XML file containing build statistics for each constituent.
+ #
+ rule out-xml ( xml-file : constituents * )
+ {
+ # Prepare valid XML header and footer with some basic info.
+ local nl = "
+" ;
+ local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+ local timestamp = [ modules.peek : JAMDATE ] ;
+ local cwd = [ PWD ] ;
+ local command = $(.argv) ;
+ local bb-version = [ version.boost-build ] ;
+ .header on $(xml-file) =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+ "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+ "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+ "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+ "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+ "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+ ;
+ .footer on $(xml-file) =
+ "$(nl)</build>" ;
+
+ # Generate the target dependency graph.
+ .contents on $(xml-file) +=
+ "$(nl) <targets>" ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local action = [ $(t).action ] ;
+ if $(action)
+ # If a target has no action, it has no dependencies.
+ {
+ local name = [ full-target-name $(t) ] ;
+ local sources = [ $(action).sources ] ;
+ local dependencies ;
+ for local s in $(sources)
+ {
+ dependencies += [ full-target-name $(s) ] ;
+ }
+
+ local path = [ $(t).path ] ;
+ local jam-target = [ $(t).actual-name ] ;
+
+ .contents on $(xml-file) +=
+ "$(nl) <target>"
+ "$(nl) <name><![CDATA[$(name)]]></name>"
+ "$(nl) <dependencies>"
+ "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+ "$(nl) </dependencies>"
+ "$(nl) <path><![CDATA[$(path)]]></path>"
+ "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+ "$(nl) </target>"
+ ;
+ }
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </targets>" ;
+
+ # Build $(xml-file) after $(constituents). Do so even if a
+ # constituent action fails and regenerate the xml on every bjam run.
+ INCLUDES $(xml-file) : $(constituents) ;
+ ALWAYS $(xml-file) ;
+ __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
+ out-xml.generate $(xml-file) ;
+ }
+
+ # The actual build actions are here; if we did this work in the actions
+ # clause we would have to form a valid command line containing the
+ # result of @(...) below (the name of the XML file).
+ #
+ rule out-xml.generate-action ( args * : xml-file
+ : command status start end user system : output ? )
+ {
+ local contents =
+ [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+ local f = @($(xml-file):E=$(contents)) ;
+ }
+
+ # Nothing to do here; the *real* actions happen in
+ # out-xml.generate-action.
+ actions quietly out-xml.generate { }
+
+ # Define the out-xml file target, which depends on all the targets so
+ # that it runs the collection after the targets have run.
+ out-xml $(.out-xml) : $(actual-targets) ;
+
+ # Set up a global __ACTION_RULE__ that records all the available
+ # statistics about each actual target in a variable "on" the --out-xml
+ # target.
+ #
+ rule out-xml.collect ( xml-file : target : command status start end user
+ system : output ? )
+ {
+ local nl = "
+" ;
+ # Open the action with some basic info.
+ .contents on $(xml-file) +=
+ "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+ # If we have an action object we can print out more detailed info.
+ local action = [ on $(target) return $(.action) ] ;
+ if $(action)
+ {
+ local action-name = [ $(action).action-name ] ;
+ local action-sources = [ $(action).sources ] ;
+ local action-props = [ $(action).properties ] ;
+
+                # The qualified name of the action with which we created the target.
+ .contents on $(xml-file) +=
+ "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+ # The sources that made up the target.
+ .contents on $(xml-file) +=
+ "$(nl) <sources>" ;
+ for local source in $(action-sources)
+ {
+ local source-actual = [ $(source).actual-name ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </sources>" ;
+
+ # The properties that define the conditions under which the
+ # target was built.
+ .contents on $(xml-file) +=
+ "$(nl) <properties>" ;
+ for local prop in [ $(action-props).raw ]
+ {
+ local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </properties>" ;
+ }
+
+ local locate = [ on $(target) return $(LOCATE) ] ;
+ locate ?= "" ;
+ .contents on $(xml-file) +=
+ "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+ "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+ "$(nl) <command><![CDATA[$(command)]]></command>"
+ "$(nl) <output><![CDATA[$(output)]]></output>" ;
+ .contents on $(xml-file) +=
+ "$(nl) </action>" ;
+ }
+
+ # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+ # the global module.
+ module
+ {
+ __ACTION_RULE__ = build-system.out-xml.collect
+ [ modules.peek build-system : .out-xml ] ;
+ }
+
+ IMPORT
+ build-system :
+ out-xml.collect
+ out-xml.generate-action
+ : :
+ build-system.out-xml.collect
+ build-system.out-xml.generate-action
+ ;
+ }
+
+ local j = [ option.get jobs ] ;
+ if $(j)
+ {
+ modules.poke : PARALLELISM : $(j) ;
+ }
+
+ local k = [ option.get keep-going : true : true ] ;
+ if $(k) in "on" "yes" "true"
+ {
+ modules.poke : KEEP_GOING : 1 ;
+ }
+ else if $(k) in "off" "no" "false"
+ {
+ modules.poke : KEEP_GOING : 0 ;
+ }
+ else
+ {
+ ECHO "error: Invalid value for the --keep-going option" ;
+ EXIT ;
+ }
+
+    # The 'all' pseudo target is not strictly needed except in the case when
+    # we use it below, but people often assume they always have this target
+    # available and do not declare it themselves before use, which may cause
+    # build failures with an error message about not being able to build the
+    # 'all' target.
+ NOTFILE all ;
+
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared all that is left is to tell Jam to update
+ # those targets.
+ if $(explicitly-requested-files)
+ {
+        # Note that this case cannot be joined with the regular one when only
+        # exact Boost Build targets are requested, as here we do not build those
+ # requested targets but only use them to construct the dependency tree
+ # needed to build the explicitly requested files.
+ UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ;
+ }
+ else if $(cleanall)
+ {
+ UPDATE clean-all ;
+ }
+ else if $(clean)
+ {
+ common.Clean clean : [ actual-clean-targets ] ;
+ UPDATE clean ;
+ }
+ else
+ {
+ configure.print-configure-checks-summary ;
+
+ if $(.pre-build-hook)
+ {
+ $(.pre-build-hook) ;
+ }
+
+ DEPENDS all : $(actual-targets) ;
+ if UPDATE_NOW in [ RULENAMES ]
+ {
+ local ok = [ UPDATE_NOW all $(.out-xml) ] ;
+ if $(.post-build-hook)
+ {
+ $(.post-build-hook) $(ok) ;
+ }
+ # Prevent automatic update of the 'all' target, now that
+ # we have explicitly updated what we wanted.
+ UPDATE ;
+ }
+ else
+ {
+ UPDATE all $(.out-xml) ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/build/ac.jam b/jam-files/boost-build/build/ac.jam
new file mode 100644
index 000000000..6768f358c
--- /dev/null
+++ b/jam-files/boost-build/build/ac.jam
@@ -0,0 +1,198 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property-set ;
+import path ;
+import modules ;
+import "class" ;
+import errors ;
+import configure ;
+
+rule find-include-path ( variable : properties : header
+ : provided-path ? )
+{
+ # FIXME: document which properties affect this function by
+ # default.
+ local target-os = [ $(properties).get <target-os> ] ;
+    properties = [ property-set.create <target-os>$(target-os) ] ;
+ if $($(variable)-$(properties))
+ {
+ return $($(variable)-$(properties)) ;
+ }
+ else
+ {
+ provided-path ?= [ modules.peek : $(variable) ] ;
+ includes = $(provided-path) ;
+ includes += [ $(properties).get <include> ] ;
+ if [ $(properties).get <target-os> ] != windows
+ {
+ # FIXME: use sysroot
+ includes += /usr/include ;
+ }
+
+ local result ;
+ while ! $(result) && $(includes)
+ {
+ local f = [ path.root $(header) $(includes[1]) ] ;
+ ECHO "Checking " $(f) ;
+ if [ path.exists $(f) ]
+ {
+ result = $(includes[1]) ;
+ }
+ else if $(provided-path)
+ {
+ errors.user-error "Could not find header" $(header)
+ : "in the user-specified directory" $(provided-path) ;
+ }
+ includes = $(includes[2-]) ;
+ }
+ $(variable)-$(properties) = $(result) ;
+ return $(result) ;
+ }
+}
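+
+# Illustrative use only (ZLIB_INCLUDE and zlib.h are made-up names): a caller
+# could locate zlib's headers with
+#
+#   local inc = [ ac.find-include-path ZLIB_INCLUDE : $(property-set)
+#       : zlib.h ] ;
+#
+# which returns the first directory among the user-provided path, any
+# <include> properties and /usr/include (on non-Windows) that contains the
+# header, caching the answer per property set.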
+
+rule find-library ( variable : properties : names + : provided-path ? )
+{
+ local target-os = [ $(properties).get <target-os> ] ;
+    properties = [ property-set.create <target-os>$(target-os) ] ;
+ if $($(variable)-$(properties))
+ {
+ return $($(variable)-$(properties)) ;
+ }
+ else
+ {
+ provided-path ?= [ modules.peek : $(variable) ] ;
+ paths = $(provided-path) ;
+ paths += [ $(properties).get <library-path> ] ;
+ if [ $(properties).get <target-os> ] != windows
+ {
+ paths += /usr/lib /usr/lib32 /usr/lib64 ;
+ }
+
+ local result ;
+ while ! $(result) && $(paths)
+ {
+ while ! $(result) && $(names)
+ {
+ local f ;
+ if $(target-os) = windows
+ {
+ f = $(paths[1])/$(names[1]).lib ;
+ if [ path.exists $(f) ]
+ {
+ result = $(f) ;
+ }
+ }
+ else
+ {
+ # FIXME: check for .a as well, depending on
+ # the 'link' feature.
+ f = $(paths[1])/lib$(names[1]).so ;
+ ECHO "CHECKING $(f) " ;
+ if [ path.exists $(f) ]
+ {
+ result = $(f) ;
+ }
+ }
+ if ! $(result) && $(provided-path)
+ {
+ errors.user-error "Could not find either of: " $(names)
+ : "in the user-specified directory" $(provided-path) ;
+
+ }
+ names = $(names[2-]) ;
+ }
+ paths = $(paths[2-]) ;
+ }
+ $(variable)-$(properties) = $(result) ;
+ return $(result) ;
+ }
+}
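+
+# Illustrative use only (ZLIB_LIBRARY and the library names are made up): the
+# matching library could then be searched for with
+#
+#   local lib = [ ac.find-library ZLIB_LIBRARY : $(property-set) : z zlib ] ;
+#
+# which probes each candidate directory for libz.so / libzlib.so (or z.lib /
+# zlib.lib on Windows) and returns the first file that exists.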
+
+class ac-library : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+ import ac ;
+ import configure ;
+
+ rule __init__ ( name : project : * : * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) ;
+
+ reconfigure $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule set-header ( header )
+ {
+ self.header = $(header) ;
+ }
+
+ rule set-default-names ( names + )
+ {
+ self.default-names = $(names) ;
+ }
+
+ rule reconfigure ( * : * )
+ {
+ ECHO "XXX" $(1) ;
+ if ! $(1)
+ {
+ # This is 'using xxx ;'. Nothing to configure, really.
+ }
+ else
+ {
+ for i in 1 2 3 4 5 6 7 8 9
+ {
+ # FIXME: this naming is inconsistent with XXX_INCLUDE/XXX_LIBRARY
+ if ! ( $($(i)[1]) in root include-path library-path library-name condition )
+ {
+ errors.user-error "Invalid named parameter" $($(i)[1]) ;
+ }
+ local name = $($(i)[1]) ;
+ local value = $($(i)[2-]) ;
+ if $($(name)) && $($(name)) != $(value)
+ {
+ errors.user-error "Attempt to change value of '$(name)'" ;
+ }
+ $(name) = $(value) ;
+ }
+
+ include-path ?= $(root)/include ;
+ library-path ?= $(root)/lib ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ # FIXME: log results.
+ local libnames = $(library-name) ;
+ if ! $(libnames) && ! $(include-path) && ! $(library-path)
+ {
+ libnames = [ modules.peek : $(name:U)_NAME ] ;
+ # Backward compatibility only.
+ libnames ?= [ modules.peek : $(name:U)_BINARY ] ;
+ }
+ libnames ?= $(self.default-names) ;
+
+ local includes = [
+ ac.find-include-path $(name:U)_INCLUDE : $(property-set) : $(self.header) : $(include-path) ] ;
+ local library = [ ac.find-library $(name:U)_LIBRARY : $(property-set) : $(libnames) : $(library-path) ] ;
+ if $(includes) && $(library)
+ {
+ library = [ virtual-target.from-file $(library) : . : $(self.project) ] ;
+ configure.log-library-search-result $(name) : "found" ;
+ return [ property-set.create <include>$(includes) <source>$(library) ] ;
+ }
+ else
+ {
+        configure.log-library-search-result $(name) : "not found" ;
+ }
+ }
+}
+
diff --git a/jam-files/boost-build/build/alias.jam b/jam-files/boost-build/build/alias.jam
new file mode 100644
index 000000000..48019cb98
--- /dev/null
+++ b/jam-files/boost-build/build/alias.jam
@@ -0,0 +1,73 @@
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'alias' rule and the associated target class.
+#
+# Alias is just a main target which returns its source targets without any
+# processing. For example:
+#
+# alias bin : hello test_hello ;
+# alias lib : helpers xml_parser ;
+#
+# Another important use of 'alias' is to conveniently group source files:
+#
+# alias platform-src : win.cpp : <os>NT ;
+# alias platform-src : linux.cpp : <os>LINUX ;
+# exe main : main.cpp platform-src ;
+#
+# Lastly, it is possible to create a local alias for some target, with different
+# properties:
+#
+# alias big_lib : : @/external_project/big_lib/<link>static ;
+#
+
+import "class" : new ;
+import project ;
+import property-set ;
+import targets ;
+
+
+class alias-target-class : basic-target
+{
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ return [ property-set.empty ] $(source-targets) ;
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
+ return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
+ }
+}
+
+
+# Declares the 'alias' target. It will process its sources' virtual targets by
+# returning them unaltered as its own constructed virtual targets.
+#
+rule alias ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new alias-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project)
+ ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ]
+ ] ;
+}
+
+
+IMPORT $(__name__) : alias : : alias ;
diff --git a/jam-files/boost-build/build/build-request.jam b/jam-files/boost-build/build/build-request.jam
new file mode 100644
index 000000000..8a1f7b0eb
--- /dev/null
+++ b/jam-files/boost-build/build/build-request.jam
@@ -0,0 +1,322 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import sequence ;
+import set ;
+import regex ;
+import feature ;
+import property ;
+import container ;
+import string ;
+
+
+# Transform property-set by applying f to each component property.
+#
+local rule apply-to-property-set ( f property-set )
+{
+ local properties = [ feature.split $(property-set) ] ;
+ return [ string.join [ $(f) $(properties) ] : / ] ;
+}
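+
+# For example,
+#
+#   apply-to-property-set feature.expand-subfeatures gcc-4.4/debug
+#
+# applies feature.expand-subfeatures to the individual properties of the set
+# and joins the expanded properties back together with '/'.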
+
+
+# Expand the given build request by combining all property-sets which do not
+# specify conflicting non-free features. Expects all the project files to
+# already be loaded.
+#
+rule expand-no-defaults ( property-sets * )
+{
+ # First make all features and subfeatures explicit.
+ local expanded-property-sets = [ sequence.transform apply-to-property-set
+ feature.expand-subfeatures : $(property-sets) ] ;
+
+ # Now combine all of the expanded property-sets
+ local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
+
+ return $(product) ;
+}
+
+
+# Implementation of x-product, below. Expects all the project files to already
+# be loaded.
+#
+local rule x-product-aux ( property-sets + )
+{
+ local result ;
+ local p = [ feature.split $(property-sets[1]) ] ;
+ local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
+ local seen ;
+ # No conflict with things used at a higher level?
+ if ! [ set.intersection $(f) : $(x-product-used) ]
+ {
+ local x-product-seen ;
+ {
+ # Do not mix in any conflicting features.
+ local x-product-used = $(x-product-used) $(f) ;
+
+ if $(property-sets[2])
+ {
+ local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
+ result = $(property-sets[1])/$(rest) ;
+ }
+
+ result ?= $(property-sets[1]) ;
+ }
+
+ # If we did not encounter a conflicting feature lower down, do not
+ # recurse again.
+ if ! [ set.intersection $(f) : $(x-product-seen) ]
+ {
+ property-sets = ;
+ }
+
+ seen = $(x-product-seen) ;
+ }
+
+ if $(property-sets[2])
+ {
+ result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
+ }
+
+ # Note that we have seen these features so that higher levels will recurse
+ # again without them set.
+ x-product-seen += $(f) $(seen) ;
+ return $(result) ;
+}
+
+
+# Return the cross-product of all elements of property-sets, less any that would
+# contain conflicting values for single-valued features. Expects all the project
+# files to already be loaded.
+#
+local rule x-product ( property-sets * )
+{
+ if $(property-sets).non-empty
+ {
+ # Prepare some "scoped globals" that can be used by the implementation
+ # function, x-product-aux.
+ local x-product-seen x-product-used ;
+ return [ x-product-aux $(property-sets) : $(feature-space) ] ;
+ }
+ # Otherwise return empty.
+}
+
+
+# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
+# an implicit value. Expects all the project files to already be loaded.
+#
+local rule looks-like-implicit-value ( v )
+{
+ if [ feature.is-implicit-value $(v) ]
+ {
+ return true ;
+ }
+ else
+ {
+ local split = [ regex.split $(v) - ] ;
+ if [ feature.is-implicit-value $(split[1]) ]
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Takes the command line tokens (such as taken from the ARGV rule) and
+# constructs a build request from them. Returns a vector of two vectors (where
+# "vector" means container.jam's "vector"). First is the set of targets
+# specified in the command line, and second is the set of requested build
+# properties. Expects all the project files to already be loaded.
+#
+rule from-command-line ( command-line * )
+{
+ local targets ;
+ local properties ;
+
+ command-line = $(command-line[2-]) ;
+ local skip-next = ;
+ for local e in $(command-line)
+ {
+ if $(skip-next)
+ {
+ skip-next = ;
+ }
+ else if ! [ MATCH "^(-).*" : $(e) ]
+ {
+ # Build request spec either has "=" in it or completely consists of
+ # implicit feature values.
+ local fs = feature-space ;
+ if [ MATCH "(.*=.*)" : $(e) ]
+ || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
+ {
+ properties += [ convert-command-line-element $(e) :
+ $(feature-space) ] ;
+ }
+ else
+ {
+ targets += $(e) ;
+ }
+ }
+ else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
+ {
+ skip-next = true ;
+ }
+ }
+ return [ new vector
+ [ new vector $(targets) ]
+ [ new vector $(properties) ] ] ;
+}
+
+
+# Converts one element of command line build request specification into internal
+# form. Expects all the project files to already be loaded.
+#
+local rule convert-command-line-element ( e )
+{
+ local result ;
+ local parts = [ regex.split $(e) "/" ] ;
+ while $(parts)
+ {
+ local p = $(parts[1]) ;
+ local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
+ local lresult ;
+ local feature ;
+ local values ;
+ if $(m)
+ {
+ feature = $(m[1]) ;
+ values = [ regex.split $(m[2]) "," ] ;
+ lresult = <$(feature)>$(values) ;
+ }
+ else
+ {
+ lresult = [ regex.split $(p) "," ] ;
+ }
+
+ if $(feature) && free in [ feature.attributes $(feature) ]
+ {
+            # If we have a free feature, then the value is everything until
+            # the end of the command-line token. Slashes in the following
+            # string are not taken to mean separation of properties. Commas
+            # are also not interpreted specially.
+ values = $(values:J=,) ;
+ values = $(values) $(parts[2-]) ;
+ values = $(values:J=/) ;
+ lresult = <$(feature)>$(values) ;
+ parts = ;
+ }
+
+ if ! [ MATCH (.*-.*) : $(p) ]
+ {
+ # property.validate cannot handle subfeatures, so we avoid the check
+ # here.
+ for local p in $(lresult)
+ {
+ property.validate $(p) : $(feature-space) ;
+ }
+ }
+
+ if ! $(result)
+ {
+ result = $(lresult) ;
+ }
+ else
+ {
+ result = $(result)/$(lresult) ;
+ }
+
+ parts = $(parts[2-]) ;
+ }
+
+ return $(result) ;
+}
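+
+# For example, the element "toolset=gcc,msvc" converts to the two properties
+# <toolset>gcc <toolset>msvc, while "include=/usr/local/include" converts to
+# the single property <include>/usr/local/include because <include> is a free
+# feature and therefore keeps the remainder of the token, slashes and commas
+# included, as its value.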
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import feature ;
+
+ feature.prepare-test build-request-test-temp ;
+
+ import build-request ;
+ import build-request : expand-no-defaults : build-request.expand-no-defaults ;
+ import errors : try catch ;
+ import feature : feature subfeature ;
+
+ feature toolset : gcc msvc borland : implicit ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+ 3.0 3.0.1 3.0.2 : optional ;
+
+ feature variant : debug release : implicit composite ;
+ feature inlining : on off ;
+ feature "include" : : free ;
+
+ feature stdlib : native stlport : implicit ;
+
+ feature runtime-link : dynamic static : symmetric ;
+
+ # Empty build requests should expand to empty.
+ assert.result
+ : build-request.expand-no-defaults ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ <variant>debug/<toolset>msvc/<stdlib>stlport
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
+ : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
+
+ assert.result
+ <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
+ : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
+
+ local r ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+
+ try ;
+ {
+ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+ }
+ catch \"static\" is not a value of an implicit feature ;
+
+ r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : target ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+ gcc/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+ borland/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam gcc-3.0 ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+
+ feature.finish-test build-request-test-temp ;
+}
diff --git a/jam-files/boost-build/build/configure.jam b/jam-files/boost-build/build/configure.jam
new file mode 100644
index 000000000..14c1328af
--- /dev/null
+++ b/jam-files/boost-build/build/configure.jam
@@ -0,0 +1,237 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines functions to help with two main tasks:
+#
+# - Discovering build-time configuration for the purposes of adjusting the
+#   build process.
+# - Reporting what is built, and how it is configured.
+
+import targets ;
+import errors ;
+import targets ;
+import sequence ;
+import property ;
+import property-set ;
+import "class" : new ;
+import common ;
+import path ;
+
+rule log-summary ( )
+{
+
+}
+
+.width = 30 ;
+
+rule set-width ( width )
+{
+ .width = $(width) ;
+}
+
+# Declare that the components specified by the parameter exist.
+rule register-components ( components * )
+{
+ .components += $(components) ;
+}
+
+# Declare that the components specified by the parameters will be built.
+rule components-building ( components * )
+{
+ .built-components += $(components) ;
+}
+
+# Report something about the component configuration that the user should
+# know about.
+rule log-component-configuration ( component : message )
+{
+ # FIXME: implement per-property-set logs
+ .component-logs.$(component) += $(message) ;
+}
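+
+# These rules are typically driven from a Jamroot after 'import configure ;',
+# for instance (with hypothetical component names):
+#
+#   configure.register-components zlib bzip2 ;
+#   configure.components-building zlib ;
+#   configure.log-component-configuration zlib : "using system zlib" ;
+#
+# so that print-component-configuration below reports zlib as "building"
+# together with the logged message.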
+
+
+
+rule log-check-result ( result )
+{
+ if ! $(.announced-checks)
+ {
+ ECHO "Performing configuration checks\n" ;
+ .announced-checks = 1 ;
+ }
+
+ ECHO $(result) ;
+ #.check-results += $(result) ;
+}
+
+rule log-library-search-result ( library : result )
+{
+ local x = [ PAD " - $(library) : $(result)" : $(.width) ] ;
+ log-check-result "$(x)" ;
+}
+
+rule print-component-configuration ( )
+{
+ local c = [ sequence.unique $(.components) ] ;
+
+ ECHO "\nComponent configuration:\n" ;
+ for c in $(.components)
+ {
+ local s ;
+ if $(c) in $(.built-components)
+ {
+ s = "building" ;
+ }
+ else
+ {
+ s = "not building" ;
+ }
+ ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
+ for local m in $(.component-logs.$(c))
+ {
+ ECHO " -" $(m) ;
+ }
+ }
+ ECHO ;
+}
+
+rule print-configure-checks-summary ( )
+{
+    # FIXME: the problem with this approach is that the user only sees the
+    # checks summary after all the checks are done, and has no progress
+    # reporting while the checks are being executed.
+ if $(.check-results)
+ {
+ ECHO "Configuration checks summary\n" ;
+
+ for local r in $(.check-results)
+ {
+ ECHO $(r) ;
+ }
+ ECHO ;
+ }
+}
+
+# Attempt to build a metatarget named by 'metatarget-reference' in the context
+# of 'project' with properties 'ps'. Returns a non-empty value if the build
+# succeeds.
+rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
+{
+ local result ;
+
+ if ! $(retry) && ! $(.$(what)-tested.$(ps))
+ {
+ .$(what)-tested.$(ps) = true ;
+
+ local targets = [ targets.generate-from-reference
+ $(metatarget-reference) : $(project) : $(ps) ] ;
+
+ local jam-targets ;
+ for local t in $(targets[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+
+ if ! UPDATE_NOW in [ RULENAMES ]
+ {
+            # Cannot determine. Assume existence.
+ }
+ else
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ if [ UPDATE_NOW $(jam-targets) :
+ $(.log-fd) : ignore-minus-n : ignore-minus-q ]
+ {
+ .$(what)-supported.$(ps) = yes ;
+ result = true ;
+ log-check-result "$(x) : yes" ;
+ }
+ else
+ {
+ log-check-result "$(x) : no" ;
+ }
+ }
+ return $(result) ;
+ }
+ else
+ {
+ return $(.$(what)-supported.$(ps)) ;
+ }
+}
+
+rule builds ( metatarget-reference : properties * : what ? : retry ? )
+{
+ what ?= "$(metatarget-reference) builds" ;
+
+    # FIXME: this should not be hardcoded. Other checks might
+    # want to consider a different set of features as relevant.
+ local toolset = [ property.select <toolset> : $(properties) ] ;
+ local toolset-version-property = "<toolset-$(toolset:G=):version>" ;
+ local relevant = [ property.select <target-os> <toolset> $(toolset-version-property)
+ <address-model> <architecture>
+ : $(properties) ] ;
+ local ps = [ property-set.create $(relevant) ] ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+
+ return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : $(retry) ] ;
+}
+
+
+# Called by Boost.Build startup code to specify the name of the file that will
+# receive the results of the configure checks. This rule should never be
+# called by users.
+rule set-log-file ( log-file )
+{
+ path.makedirs [ path.parent $(log-file) ] ;
+
+ .log-fd = [ FILE_OPEN $(log-file) : "w" ] ;
+}
+
+# Frontend rules
+
+class check-target-builds-worker
+{
+ import configure ;
+ import property-set ;
+ import targets ;
+ import property ;
+
+ rule __init__ ( target message ? : true-properties * : false-properties * )
+ {
+ self.target = $(target) ;
+ self.message = $(message) ;
+ self.true-properties = $(true-properties) ;
+ self.false-properties = $(false-properties) ;
+ }
+
+ rule check ( properties * )
+ {
+ local choosen ;
+ if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
+ {
+ choosen = $(self.true-properties) ;
+ }
+ else
+ {
+ choosen = $(self.false-properties) ;
+ }
+ return [ property.evaluate-conditionals-in-context $(choosen) : $(properties) ] ;
+ }
+}
+
+
+rule check-target-builds ( target message ? : true-properties * : false-properties * )
+{
+ local instance = [ new check-target-builds-worker $(target) $(message) : $(true-properties)
+ : $(false-properties) ] ;
+ return <conditional>@$(instance).check ;
+}
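+
+# Illustrative use in a Jamfile (has_zlib being a hypothetical metatarget
+# declared elsewhere):
+#
+#   exe app : app.cpp
+#       : [ check-target-builds has_zlib "zlib" : <define>HAS_ZLIB
+#           : <define>NO_ZLIB ]
+#       ;
+#
+# The returned <conditional> property makes the requirements depend on whether
+# the named metatarget builds with the current properties.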
+
+IMPORT $(__name__) : check-target-builds : : check-target-builds ;
+
+
diff --git a/jam-files/boost-build/build/feature.jam b/jam-files/boost-build/build/feature.jam
new file mode 100644
index 000000000..6f54adefb
--- /dev/null
+++ b/jam-files/boost-build/build/feature.jam
@@ -0,0 +1,1335 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import assert : * ;
+import "class" : * ;
+import errors : lol->list ;
+import indirect ;
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+import utility ;
+
+
+local rule setup ( )
+{
+ .all-attributes =
+ implicit
+ composite
+ optional
+ symmetric
+ free
+ incidental
+ path
+ dependency
+ propagated
+ link-incompatible
+ subfeature
+ order-sensitive
+ ;
+
+ .all-features = ;
+ .all-subfeatures = ;
+ .all-top-features = ; # non-subfeatures
+ .all-implicit-values = ;
+}
+setup ;
+
+
+# Prepare a fresh space to test in by moving all global variable settings into
+# the given temporary module and erasing them here.
+#
+rule prepare-test ( temp-module )
+{
+ DELETE_MODULE $(temp-module) ;
+
+ # Transfer globals to temp-module.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ modules.poke $(temp-module) : $(v) : $($(v)) ;
+ $(v) = ;
+ }
+ }
+ setup ;
+}
+
+
+# Clear out all global variables and recover all variables from the given
+# temporary module.
+#
+rule finish-test ( temp-module )
+{
+ # Clear globals.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ $(v) = ;
+ }
+ }
+
+ for local v in [ VARNAMES $(temp-module) ]
+ {
+ $(v) = [ modules.peek $(temp-module) : $(v) ] ;
+ }
+ DELETE_MODULE $(temp-module) ;
+}
+
+
+# Transform features by bracketing any elements which are not already bracketed
+# by "<>".
+#
+local rule grist ( features * )
+{
+ local empty = "" ;
+ return $(empty:G=$(features)) ;
+}
+
+
+# Declare a new feature with the given name, values, and attributes.
+#
+rule feature (
+ name # Feature name.
+ : values * # Allowable values - may be extended later using feature.extend.
+ : attributes * # Feature attributes (e.g. implicit, free, propagated...).
+)
+{
+ name = [ grist $(name) ] ;
+
+ local error ;
+
+ # Check for any unknown attributes.
+ if ! ( $(attributes) in $(.all-attributes) )
+ {
+ error = unknown attributes:
+ [ set.difference $(attributes) : $(.all-attributes) ] ;
+ }
+ else if $(name) in $(.all-features)
+ {
+ error = feature already defined: ;
+ }
+ else if implicit in $(attributes) && free in $(attributes)
+ {
+ error = free features cannot also be implicit ;
+ }
+ else if free in $(attributes) && propagated in $(attributes)
+ {
+ error = free features cannot be propagated ;
+ }
+ else
+ {
+ local m = [ MATCH (.*=.*) : $(values) ] ;
+ if $(m[1])
+ {
+ error = "feature value may not contain '='" ;
+ }
+ }
+
+ if $(error)
+ {
+ errors.error $(error)
+ : "in" feature declaration:
+ : feature [ lol->list $(1) : $(2) : $(3) ] ;
+ }
+
+ $(name).values ?= ;
+ $(name).attributes = $(attributes) ;
+ $(name).subfeatures ?= ;
+ $(attributes).features += $(name) ;
+
+ .all-features += $(name) ;
+ if subfeature in $(attributes)
+ {
+ .all-subfeatures += $(name) ;
+ }
+ else
+ {
+ .all-top-features += $(name) ;
+ }
+ extend $(name) : $(values) ;
+}
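+
+# For example:
+#
+#   feature variant : debug release : implicit composite ;
+#   feature "include" : : free ;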
+
+
+# Sets the default value of the given feature, overriding any previous default.
+#
+rule set-default ( feature : value )
+{
+ local f = [ grist $(feature) ] ;
+ local a = $($(f).attributes) ;
+ local bad-attribute = ;
+ if free in $(a)
+ {
+ bad-attribute = free ;
+ }
+ else if optional in $(a)
+ {
+ bad-attribute = optional ;
+ }
+ if $(bad-attribute)
+ {
+ errors.error "$(bad-attribute) property $(f) cannot have a default." ;
+ }
+ if ! $(value) in $($(f).values)
+ {
+ errors.error "The specified default value, '$(value)' is invalid"
+ : "allowed values are: " $($(f).values) ;
+ }
+ $(f).default = $(value) ;
+}
+
+
+# Returns the default property values for the given features.
+#
+rule defaults ( features * )
+{
+ local result ;
+ for local f in $(features)
+ {
+ local gf = $(:E=:G=$(f)) ;
+ local a = $($(gf).attributes) ;
+ if ( free in $(a) ) || ( optional in $(a) )
+ {
+ }
+ else
+ {
+ result += $(gf)$($(gf).default) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns true iff all 'names' elements are valid features.
+#
+rule valid ( names + )
+{
+ if $(names) in $(.all-features)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the attributes of the given feature.
+#
+rule attributes ( feature )
+{
+ return $($(:E=:G=$(feature)).attributes) ;
+}
+
+
+# Returns the values of the given feature.
+#
+rule values ( feature )
+{
+ return $($(:E=:G=$(feature)).values) ;
+}
+
+
+# Returns true iff 'value-string' is a value-string of an implicit feature.
+#
+rule is-implicit-value ( value-string )
+{
+ local v = [ regex.split $(value-string) - ] ;
+ local failed ;
+ if ! $(v[1]) in $(.all-implicit-values)
+ {
+ failed = true ;
+ }
+ else
+ {
+ local feature = $($(v[1]).implicit-feature) ;
+ for local subvalue in $(v[2-])
+ {
+ if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
+ {
+ failed = true ;
+ }
+ }
+ }
+
+ if ! $(failed)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the implicit feature associated with the given implicit value.
+#
+rule implied-feature ( implicit-value )
+{
+ local components = [ regex.split $(implicit-value) "-" ] ;
+
+ local feature = $($(components[1]).implicit-feature) ;
+ if ! $(feature)
+ {
+ errors.error \"$(implicit-value)\" is not a value of an implicit feature ;
+ feature = "" ; # Keep testing happy; it expects a result.
+ }
+ return $(feature) ;
+}
+
+
+local rule find-implied-subfeature ( feature subvalue : value-string ? )
+{
+ # Feature should be of the form <feature-name>.
+ if $(feature) != $(feature:G)
+ {
+ errors.error invalid feature $(feature) ;
+ }
+
+ return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
+}
+
+
+# Given a feature and a value of one of its subfeatures, find the name of the
+# subfeature. If value-string is supplied, looks for implied subfeatures that
+# are specific to that value of the feature.
+#
+rule implied-subfeature (
+ feature # The main feature name.
+ subvalue # The value of one of its subfeatures.
+ : value-string ? # The value of the main feature.
+)
+{
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
+ : $(value-string) ] ;
+ if ! $(subfeature)
+ {
+ value-string ?= "" ;
+ errors.error \"$(subvalue)\" is not a known subfeature value of
+ $(feature)$(value-string) ;
+ }
+ return $(subfeature) ;
+}
+
+
+# Generate an error if the feature is unknown.
+#
+local rule validate-feature ( feature )
+{
+ if ! $(feature) in $(.all-features)
+ {
+ errors.error unknown feature \"$(feature)\" ;
+ }
+}
+
+
+# Given a feature and its value or just a value corresponding to an implicit
+# feature, returns a property set consisting of all component subfeatures and
+# their values. For example all the following calls:
+#
+# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
+# expand-subfeatures-aux gcc-2.95.2-linux-x86
+#
+# return:
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+local rule expand-subfeatures-aux (
+ feature ? # Feature name or empty if value corresponds to an
+ # implicit property.
+ : value # Feature value.
+ : dont-validate ? # If set, no value string validation will be done.
+)
+{
+ if $(feature)
+ {
+ feature = $(feature) ;
+ }
+
+ if ! $(feature)
+ {
+ feature = [ implied-feature $(value) ] ;
+ }
+ else
+ {
+ validate-feature $(feature) ;
+ }
+ if ! $(dont-validate)
+ {
+ validate-value-string $(feature) $(value) ;
+ }
+
+ local components = [ regex.split $(value) "-" ] ;
+
+ # Get the top-level feature's value.
+ local value = $(components[1]:G=) ;
+
+ local result = $(components[1]:G=$(feature)) ;
+
+ local subvalues = $(components[2-]) ;
+ while $(subvalues)
+ {
+ local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
+ subvalues = $(subvalues[2-]) ;
+
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
+ $(value) ] ;
+
+ # If no subfeature was found reconstitute the value string and use that.
+ if ! $(subfeature)
+ {
+ result = $(components:J=-) ;
+ result = $(result:G=$(feature)) ;
+ subvalues = ; # Stop looping.
+ }
+ else
+ {
+ local f = [ MATCH ^<(.*)>$ : $(feature) ] ;
+ result += $(subvalue:G=$(f)-$(subfeature)) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Make all elements of properties corresponding to implicit features explicit,
+# and express all subfeature values as separate properties in their own right.
+# For example, all of the following properties
+#
+# gcc-2.95.2-linux-x86
+# <toolset>gcc-2.95.2-linux-x86
+#
+# might expand to
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+rule expand-subfeatures (
+ properties * # Property set with elements of the form
+ # <feature>value-string or just value-string in the case
+ # of implicit features.
+ : dont-validate ?
+)
+{
+ local result ;
+ for local p in $(properties)
+ {
+ # Don't expand subfeatures in subfeatures
+ if ! [ MATCH "(:)" : $(p:G) ]
+ {
+ result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for extend, below. Handles the feature case.
+#
+local rule extend-feature ( feature : values * )
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if implicit in $($(feature).attributes)
+ {
+ for local v in $(values)
+ {
+ if $($(v).implicit-feature)
+ {
+ errors.error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ;
+ }
+ $(v).implicit-feature = $(feature) ;
+ }
+
+ .all-implicit-values += $(values) ;
+ }
+ if ! $($(feature).values)
+ {
+ # This is the first value specified for this feature so make it be the
+ # default.
+ $(feature).default = $(values[1]) ;
+ }
+ $(feature).values += $(values) ;
+}
+
+
+# Checks that value-string is a valid value-string for the given feature.
+#
+rule validate-value-string ( feature value-string )
+{
+ if ! (
+ free in $($(feature).attributes)
+ || ( $(value-string) in $(feature).values )
+ )
+ {
+ local values = $(value-string) ;
+
+ if $($(feature).subfeatures)
+ {
+ if ! ( $(value-string) in $($(feature).values) )
+ && ! ( $(value-string) in $($(feature).subfeatures) )
+ {
+ values = [ regex.split $(value-string) - ] ;
+ }
+ }
+
+ if ! ( $(values[1]) in $($(feature).values) ) &&
+
+ # An empty value is allowed for optional features.
+ ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
+ {
+ errors.error \"$(values[1])\" is not a known value of feature $(feature)
+ : legal values: \"$($(feature).values)\" ;
+ }
+
+ for local v in $(values[2-])
+ {
+ # This will validate any subfeature values in value-string.
+ implied-subfeature $(feature) $(v) : $(values[1]) ;
+ }
+ }
+}
+
+
+# A helper that computes:
+# * name(s) of module-local variable(s) used to record the correspondence
+# between subvalue(s) and a subfeature
+# * value of that variable when such a subfeature/subvalue has been defined and
+# returns a list consisting of the latter followed by the former.
+#
+local rule subvalue-var (
+ feature # Main feature name.
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the subfeature values are valid.
+ : subfeature # Subfeature name.
+ : subvalues * # Subfeature values.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if $(value-string)
+ {
+ validate-value-string $(feature) $(value-string) ;
+ }
+
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ return $(subfeature-name)
+ $(feature)$(value-string:E="")<>$(subvalues).subfeature ;
+}
+
+
+# Extends the given subfeature with the subvalues. If the optional value-string
+# is provided, the subvalues are only valid for the given value of the feature.
+# Thus, you could say that <target-platform>mingw is specific to
+# <toolset>gcc-2.95.2 as follows:
+#
+# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
+#
+rule extend-subfeature (
+ feature # The feature whose subfeature is being extended.
+
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the new subfeature values are valid.
+
+ : subfeature # Subfeature name.
+ : subvalues * # Additional subfeature values.
+)
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalues) ] ;
+
+ local f = [ utility.ungrist [ grist $(feature) ] ] ;
+ extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
+
+ # Provide a way to get from the given feature or property and subfeature
+ # value to the subfeature name.
+ $(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
+}
+
+
+# Returns true iff the subvalues are valid for the feature. When the optional
+# value-string is provided, returns true iff the subvalues are valid for the
+# given value of the feature.
+#
+rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalue) ] ;
+
+ if $($(subfeature-vars[2])) = $(subfeature-vars[1])
+ {
+ return true ;
+ }
+}
+
+
+# Can be called three ways:
+#
+# 1. extend feature : values *
+# 2. extend <feature> subfeature : values *
+# 3. extend <feature>value-string subfeature : values *
+#
+# * Form 1 adds the given values to the given feature.
+# * Forms 2 and 3 add subfeature values to the given feature.
+# * Form 3 adds the subfeature values as specific to the given property
+# value-string.
+#
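+# Illustrative calls (they assume the feature and subfeature declarations made
+# in the __test__ rule below; the added values are hypothetical):
+#
+#   extend toolset : msvc metrowerks ;      # form 1
+#   extend <toolset>gcc version : 3.4.6 ;   # form 3: adds a gcc-specific
+#                                           # subfeature value
+#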
+rule extend ( feature-or-property subfeature ? : values * )
+{
+ local feature ; # If a property was specified this is its feature.
+ local value-string ; # E.g., the gcc-2.95-2 part of <toolset>gcc-2.95.2.
+
+ # If a property was specified.
+ if $(feature-or-property:G) && $(feature-or-property:G=)
+ {
+ # Extract the feature and value-string, if any.
+ feature = $(feature-or-property:G) ;
+ value-string = $(feature-or-property:G=) ;
+ }
+ else
+ {
+ feature = [ grist $(feature-or-property) ] ;
+ }
+
+ # Dispatch to the appropriate handler.
+ if $(subfeature)
+ {
+ extend-subfeature $(feature) $(value-string) : $(subfeature)
+ : $(values) ;
+ }
+ else
+ {
+ # If no subfeature was specified, we do not expect to see a
+ # value-string.
+ if $(value-string)
+ {
+ errors.error can only specify a property as the first argument when
+ extending a subfeature
+ : usage:
+ : " extend" feature ":" values...
+ : " | extend" <feature>value-string subfeature ":" values...
+ ;
+ }
+
+ extend-feature $(feature) : $(values) ;
+ }
+}
+
+
+local rule get-subfeature-name ( subfeature value-string ? )
+{
+ local prefix = $(value-string): ;
+ return $(prefix:E="")$(subfeature) ;
+}
+
+
+# Declares a subfeature.
+#
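+# For example, the following (mirroring the tests below) declares a 'version'
+# subfeature that is only valid for the gcc value of <toolset>:
+#
+#   subfeature toolset gcc : version : 2.95.2 2.95.3 ;
+#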
+rule subfeature (
+ feature # Root feature that is not a subfeature.
+ value-string ? # A value-string specifying which feature or subfeature
+ # values this subfeature is specific to, if any.
+ : subfeature # The name of the subfeature being declared.
+ : subvalues * # The allowed values of this subfeature.
+ : attributes * # The attributes of the subfeature.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+
+ # Add grist to the subfeature name if a value-string was supplied.
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ if $(subfeature-name) in $($(feature).subfeatures)
+ {
+ errors.error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\"
+ "specific to "$(value-string) ;
+ }
+ $(feature).subfeatures += $(subfeature-name) ;
+
+ # First declare the subfeature as a feature in its own right.
+ local f = [ utility.ungrist $(feature) ] ;
+ feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
+
+ # Now make sure the subfeature values are known.
+ extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
+}
+
+
+# Set components of the given composite property.
+#
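+# For example (as in the tests below):
+#
+#   compose <variant>debug : <define>_DEBUG <optimization>off ;
+#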
+rule compose ( composite-property : component-properties * )
+{
+ local feature = $(composite-property:G) ;
+ if ! ( composite in [ attributes $(feature) ] )
+ {
+ errors.error "$(feature)" is not a composite feature ;
+ }
+
+ $(composite-property).components ?= ;
+ if $($(composite-property).components)
+ {
+ errors.error components of "$(composite-property)" already set:
+ $($(composite-property).components) ;
+ }
+
+ if $(composite-property) in $(component-properties)
+ {
+ errors.error composite property "$(composite-property)" cannot have itself as a component ;
+ }
+ $(composite-property).components = $(component-properties) ;
+}
+
+
+local rule expand-composite ( property )
+{
+ return $(property)
+ [ sequence.transform expand-composite : $($(property).components) ] ;
+}
+
+
+# Return all values of the given feature specified by the given property set.
+#
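+# For example (illustrative feature and values):
+#
+#   get-values <x> : <x>a <y>b <x>c ;   # returns: a c
+#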
+rule get-values ( feature : properties * )
+{
+ local result ;
+
+ feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ # Use MATCH instead of :G= to get the value, in order to preserve
+ # the value intact instead of having bjam treat it as a decomposable
+ # path.
+ result += [ MATCH ">(.*)" : $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+rule free-features ( )
+{
+ return $(free.features) ;
+}
+
+
+# Expand all composite properties in the set so that all components are
+# explicitly expressed.
+#
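+# For example, given the compose declaration shown above for <variant>debug,
+# expand-composites <variant>debug would yield:
+#
+#   <variant>debug <define>_DEBUG <optimization>off
+#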
+rule expand-composites ( properties * )
+{
+ local explicit-features = $(properties:G) ;
+ local result ;
+
+ # Now expand composite features.
+ for local p in $(properties)
+ {
+ local expanded = [ expand-composite $(p) ] ;
+
+ for local x in $(expanded)
+ {
+ if ! $(x) in $(result)
+ {
+ local f = $(x:G) ;
+
+ if $(f) in $(free.features)
+ {
+ result += $(x) ;
+ }
+ else if ! $(x) in $(properties) # x is the result of expansion
+ {
+ if ! $(f) in $(explicit-features) # not explicitly-specified
+ {
+ if $(f) in $(result:G)
+ {
+ errors.error expansions of composite features result
+ in conflicting values for $(f)
+ : values: [ get-values $(f) : $(result) ] $(x:G=)
+ : one contributing composite property was $(p) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ else if $(f) in $(result:G)
+ {
+ errors.error explicitly-specified values of non-free feature
+ $(f) conflict :
+ "existing values:" [ get-values $(f) : $(properties) ] :
+ "value from expanding " $(p) ":" $(x:G=) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Return true iff f is an ordinary subfeature of the parent-property's feature,
+# or if f is a subfeature of the parent-property's feature specific to the
+# parent-property's value.
+#
+local rule is-subfeature-of ( parent-property f )
+{
+ if subfeature in $($(f).attributes)
+ {
+ local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
+ if $(specific-subfeature)
+ {
+ # The feature has the form <topfeature-topvalue:subfeature>, e.g.
+ # <toolset-msvc:version>.
+ local feature-value = [ split-top-feature $(specific-subfeature[1])
+ ] ;
+ if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ # The feature has the form <topfeature-subfeature>, e.g.
+ # <toolset-version>
+ local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
+ if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
+ {
+ return true ;
+ }
+ }
+ }
+}
+
+
+# As for is-subfeature-of but for subproperties.
+#
+local rule is-subproperty-of ( parent-property p )
+{
+ return [ is-subfeature-of $(parent-property) $(p:G) ] ;
+}
+
+
+# Given a property, return the subset of features consisting of all ordinary
+# subfeatures of the property's feature, and all specific subfeatures of the
+# property's feature which are conditional on the property's value.
+#
+local rule select-subfeatures ( parent-property : features * )
+{
+ return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
+}
+
+
+# As for select-subfeatures but for subproperties.
+#
+local rule select-subproperties ( parent-property : properties * )
+{
+ return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
+}
+
+
+# Given a property set which may consist of composite and implicit properties
+# and combined subfeature values, returns an expanded, normalized property set
+# with all implicit features expressed explicitly, all subfeature values
+# individually expressed, and all components of composite properties expanded.
+# Non-free features directly expressed in the input properties cause any values
+# of those features due to composite feature expansion to be dropped. If two
+# values of a given non-free feature are directly expressed in the input, an
+# error is issued.
+#
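+# For example, with the declarations made in the __test__ rule below (see the
+# corresponding assertion there):
+#
+#   expand gcc-3.0.1 debug <optimization>on ;
+#
+# returns:
+#
+#   <toolset>gcc <toolset-gcc:version>3.0.1 <variant>debug <define>_DEBUG
+#   <optimization>on
+#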
+rule expand ( properties * )
+{
+ local expanded = [ expand-subfeatures $(properties) ] ;
+ return [ expand-composites $(expanded) ] ;
+}
+
+
+# Helper rule for minimize. Returns true iff property's feature is present in
+# the contents of the variable named by feature-set-var.
+#
+local rule in-features ( feature-set-var property )
+{
+ if $(property:G) in $($(feature-set-var))
+ {
+ return true ;
+ }
+}
+
+
+# Helper rule for minimize. Returns the list with the same properties, but with
+# all subfeatures moved to the end of the list.
+#
+local rule move-subfeatures-to-the-end ( properties * )
+{
+ local x1 ;
+ local x2 ;
+ for local p in $(properties)
+ {
+ if subfeature in $($(p:G).attributes)
+ {
+ x2 += $(p) ;
+ }
+ else
+ {
+ x1 += $(p) ;
+ }
+ }
+ return $(x1) $(x2) ;
+}
+
+
+# Given an expanded property set, eliminate all redundancy: properties that are
+# components of other (composite) properties in the set will be eliminated.
+# Non-symmetric properties equal to default values will be eliminated unless
+# they override a value from some composite property. Implicit properties will
+# be expressed without feature grist, and sub-property values will be expressed
+# as elements joined to the corresponding main property.
+#
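+# For example, mirroring one of the assertions in the __test__ rule below:
+#
+#   minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
+#
+# returns gcc-3.0.1 debug.
+#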
+rule minimize ( properties * )
+{
+ # Precondition checking
+ local implicits = [ set.intersection $(p:G=) : $(p:G) ] ;
+ if $(implicits)
+ {
+ errors.error minimize requires an expanded property set, but
+ \"$(implicits[1])\" appears to be the value of an un-expanded
+ implicit feature ;
+ }
+
+ # Remove properties implied by composite features.
+ local components = $($(properties).components) ;
+ local x = [ set.difference $(properties) : $(components) ] ;
+
+ # Handle subfeatures and implicit features.
+ x = [ move-subfeatures-to-the-end $(x) ] ;
+ local result ;
+ while $(x)
+ {
+ local p fullp = $(x[1]) ;
+ local f = $(p:G) ;
+ local v = $(p:G=) ;
+
+ # Eliminate features in implicit properties.
+ if implicit in [ attributes $(f) ]
+ {
+ p = $(v) ;
+ }
+
+ # Locate all subproperties of $(x[1]) in the property set.
+ local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
+ if $(subproperties)
+ {
+ # Reconstitute the joined property name.
+ local sorted = [ sequence.insertion-sort $(subproperties) ] ;
+ result += $(p)-$(sorted:G="":J=-) ;
+
+ x = [ set.difference $(x[2-]) : $(subproperties) ] ;
+ }
+ else
+ {
+ # Eliminate properties whose value is equal to feature's default,
+ # which are not symmetric and which do not contradict values implied
+ # by composite properties.
+
+ # Since all component properties of composites in the set have been
+ # eliminated, any remaining property whose feature is the same as a
+ # component of a composite in the set must have a non-redundant
+ # value.
+ if $(fullp) != [ defaults $(f) ]
+ || symmetric in [ attributes $(f) ]
+ || $(fullp:G) in $(components:G)
+ {
+ result += $(p) ;
+ }
+
+ x = $(x[2-]) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Combine all subproperties into their parent properties
+#
+# Requires: for every subproperty, there is a parent property. All features are
+# explicitly expressed.
+#
+# This rule probably should not be needed, but build-request.expand-no-defaults
+# is being abused for unintended purposes and it needs help.
+#
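+# Illustrative call (assuming the toolset/version declarations from the tests
+# below):
+#
+#   compress-subproperties <toolset>gcc <toolset-gcc:version>3.0.1 <variant>debug ;
+#
+# would return <toolset>gcc-3.0.1 <variant>debug.
+#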
+rule compress-subproperties ( properties * )
+{
+ local all-subs ;
+ local matched-subs ;
+ local result ;
+
+ for local p in $(properties)
+ {
+ if ! $(p:G)
+ {
+ # Expecting fully-gristed properties.
+ assert.variable-not-empty p:G ;
+ }
+
+ if ! subfeature in $($(p:G).attributes)
+ {
+ local subs = [ sequence.insertion-sort
+ [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
+
+ matched-subs += $(subs) ;
+
+ local subvalues = -$(subs:G=:J=-) ;
+ subvalues ?= "" ;
+ result += $(p)$(subvalues) ;
+ }
+ else
+ {
+ all-subs += $(p) ;
+ }
+ }
+ assert.result true : set.equal $(all-subs) : $(matched-subs) ;
+ return $(result) ;
+}
+
+
+# Given an ungristed string, finds the longest prefix which is a top-level
+# feature name followed by a dash, and returns a pair consisting of the parts
+# before and after that dash. More interesting than a simple split because
+# feature names may contain dashes.
+#
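+# For example, assuming 'toolset' is a known top-level feature:
+#
+#   split-top-feature toolset-gcc ;   # returns: toolset gcc
+#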
+local rule split-top-feature ( feature-plus )
+{
+ local e = [ regex.split $(feature-plus) - ] ;
+ local f = $(e[1]) ;
+ local v ;
+ while $(e)
+ {
+ if <$(f)> in $(.all-top-features)
+ {
+ v = $(f) $(e[2-]:J=-) ;
+ }
+ e = $(e[2-]) ;
+ f = $(f)-$(e[1]) ;
+ }
+ return $(v) ;
+}
+
+
+# Given a set of properties, add default values for features not represented in
+# the set.
+#
+# Note: if there's an ordinary feature F1 and a composite feature F2 which
+# includes some value for F1, and both features have default values, then the
+# default value of F1 will be added (as opposed to the value in F2). This might
+# not be the right idea, e.g. consider:
+#
+# feature variant : debug ... ;
+# <variant>debug : .... <runtime-debugging>on
+# feature <runtime-debugging> : off on ;
+#
+# Here, when adding default for an empty property set, we'll get
+#
+# <variant>debug <runtime-debugging>off
+#
+# and that's kind of strange.
+#
+rule add-defaults ( properties * )
+{
+ for local v in $(properties:G=)
+ {
+ if $(v) in $(properties)
+ {
+ errors.error add-defaults requires explicitly specified features,
+ but \"$(v)\" appears to be the value of an un-expanded implicit
+ feature ;
+ }
+ }
+ # We don't add default for elements with ":" inside. This catches:
+ # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
+# to be taken as a specified value for <variant>
+ # 2. Free properties with ":" in values. We don't care, since free
+ # properties don't have defaults.
+ local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
+ local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
+ local more = [ defaults $(missing-top) ] ;
+ properties += $(more) ;
+ xproperties += $(more) ;
+
+ # Add defaults for subfeatures of features which are present.
+ for local p in $(xproperties)
+ {
+ local s = $($(p:G).subfeatures) ;
+ local f = [ utility.ungrist $(p:G) ] ;
+ local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
+ properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
+ }
+
+ return $(properties) ;
+}
+
+
+# Given a property-set of the form
+# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
+#
+# Returns
+# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
+#
+# Note that vN...vM may contain slashes. This needs to be resilient to the
+# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
+# slash direction on NT.
+#
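+# For example (mirroring the tests below):
+#
+#   split <x>y/z/<a>b/c ;   # returns: <x>y/z <a>b/c
+#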
+rule split ( property-set )
+{
+ local pieces = [ regex.split $(property-set) [\\/] ] ;
+ local result ;
+
+ for local x in $(pieces)
+ {
+ if ( ! $(x:G) ) && $(result[-1]:G)
+ {
+ result = $(result[1--2]) $(result[-1])/$(x) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Tests of module feature.
+#
+rule __test__ ( )
+{
+ # Use a fresh copy of the feature module.
+ prepare-test feature-test-temp ;
+
+ import assert ;
+ import errors : try catch ;
+
+ # These are local rules and so must be explicitly reimported into the
+ # testing module.
+ import feature : extend-feature validate-feature select-subfeatures ;
+
+ feature toolset : gcc : implicit ;
+ feature define : : free ;
+ feature runtime-link : dynamic static : symmetric ;
+ feature optimization : on off ;
+ feature variant : debug release profile : implicit composite symmetric ;
+ feature stdlib : native stlport ;
+ feature magic : : free ;
+
+ compose <variant>debug : <define>_DEBUG <optimization>off ;
+ compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ assert.result dynamic static : values <runtime-link> ;
+ assert.result dynamic static : values runtime-link ;
+
+ try ;
+ {
+ compose <variant>profile : <variant>profile ;
+ }
+ catch composite property <variant>profile cannot have itself as a component ;
+
+ extend-feature toolset : msvc metrowerks ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
+
+ assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : gcc : version : 1.1 ;
+
+ assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : : version : yabba ;
+
+ feature yabba ;
+ subfeature yabba : version : dabba ;
+ assert.true is-subvalue yabba : : version : dabba ;
+
+ subfeature toolset gcc : platform : linux cygwin : optional ;
+
+ assert.result <toolset-gcc:version>
+ : select-subfeatures <toolset>gcc
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib> ;
+
+ subfeature stdlib : version : 3 4 : optional ;
+
+ assert.result <stdlib-version>
+ : select-subfeatures <stdlib>native
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib-version> ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures <toolset>gcc-3.0.1 ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
+ : expand-subfeatures <toolset>gcc-3.0.1-linux ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
+
+ assert.result <define>foo=x-y
+ : expand-subfeatures <define>foo=x-y ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures gcc-3.0.1 ;
+
+ assert.result a c e
+ : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ <variant>debug <define>_DEBUG <optimization>on
+ : expand gcc-3.0.1 debug <optimization>on ;
+
+ assert.result <variant>debug <define>_DEBUG <optimization>on
+ : expand debug <optimization>on ;
+
+ assert.result <optimization>on <variant>debug <define>_DEBUG
+ : expand <optimization>on debug ;
+
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults <runtime-link> <define> <optimization> ;
+
+ # Make sure defaults is resilient to missing grist.
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults runtime-link define optimization ;
+
+ feature dummy : dummy1 dummy2 ;
+ subfeature dummy : subdummy : x y z : optional ;
+
+ feature fu : fu1 fu2 : optional ;
+ subfeature fu : subfu : x y z : optional ;
+ subfeature fu : subfu2 : q r s ;
+
+ assert.result optional : attributes <fu> ;
+ assert.result optional : attributes fu ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
+ <dummy>dummy1 <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
+ <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 ;
+
+ set-default <runtime-link> : static ;
+ assert.result <runtime-link>static : defaults <runtime-link> ;
+
+ assert.result gcc-3.0.1 debug <optimization>on
+ : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
+
+ assert.result gcc-3.0.1 debug <runtime-link>dynamic
+ : minimize
+ [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
+
+ assert.result gcc-3.0.1 debug
+ : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
+
+ assert.result debug <optimization>on
+ : minimize [ expand debug <optimization>on ] ;
+
+ assert.result gcc-3.0
+ : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
+
+ assert.result gcc-3.0
+ : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y/z/<a>b/c/<d>e/f ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a/b/c/<d>e/f/g/<h>i/j/k ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
+
+ # Test error checking.
+
+ try ;
+ {
+ expand release <optimization>off <optimization>on ;
+ }
+ catch explicitly-specified values of non-free feature <optimization> conflict ;
+
+ try ;
+ {
+ validate-feature <foobar> ;
+ }
+ catch unknown feature ;
+
+ validate-value-string <toolset> gcc ;
+ validate-value-string <toolset> gcc-3.0.1 ;
+
+ try ;
+ {
+ validate-value-string <toolset> digital_mars ;
+ }
+ catch \"digital_mars\" is not a known value of <toolset> ;
+
+ try ;
+ {
+ feature foobar : : baz ;
+ }
+ catch unknown attributes: baz ;
+
+ feature feature1 ;
+ try ;
+ {
+ feature feature1 ;
+ }
+ catch feature already defined: ;
+
+ try ;
+ {
+ feature feature2 : : free implicit ;
+ }
+ catch free features cannot also be implicit ;
+
+ try ;
+ {
+ feature feature3 : : free propagated ;
+ }
+ catch free features cannot be propagated ;
+
+ try ;
+ {
+ implied-feature lackluster ;
+ }
+ catch \"lackluster\" is not a value of an implicit feature ;
+
+ try ;
+ {
+ implied-subfeature <toolset> 3.0.1 ;
+ }
+ catch \"3.0.1\" is not a known subfeature value of <toolset> ;
+
+ try ;
+ {
+ implied-subfeature <toolset> not-a-version : gcc ;
+ }
+ catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
+
+ # Leave a clean copy of the features module behind.
+ finish-test feature-test-temp ;
+}
diff --git a/jam-files/boost-build/build/generators.jam b/jam-files/boost-build/build/generators.jam
new file mode 100644
index 000000000..1515525f2
--- /dev/null
+++ b/jam-files/boost-build/build/generators.jam
@@ -0,0 +1,1408 @@
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Manages 'generators' --- objects which can do transformations between different
+# target types and contain the algorithm for finding a transformation from
+# sources to targets.
+#
+# The main entry point to this module is generators.construct rule. It is given
+# a list of source targets, desired target type and a set of properties. It
+# starts by selecting 'viable generators', which have any chance of producing
+# the desired target type with the required properties. Generators are ranked
+# and a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the
+# properties and list of sources. Each one selects a target which can be
+# directly consumed, and tries to convert the remaining ones to the types it can
+# consume. This is done by recursively calling 'construct' with all consumable
+# types.
+#
+# If the generator has collected all the targets it needs, it creates the
+# targets corresponding to the result and returns them. When all generators have
+# been run, the results of one of them are selected and returned as the result.
+#
+# It is quite possible for 'construct' to return more targets than it was asked
+# for. For example, if it were asked to generate a target of type EXE, but the
+# only generator found produces both EXE and TDS (a file with debug information).
+# The extra target will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it
+# can get more targets than it asked for. The question is what to do with extra
+# targets. Boost.Build attempts to convert them to requested types, and attempts
+# that as early as possible. Specifically, this is done after invoking each
+# generator. TODO: An example is needed to document the rationale for trying
+# extra target conversion at that point.
+#
+# In order for the system to be able to use a specific generator instance 'when
+# needed', the instance needs to be registered with the system using
+# generators.register() or one of its related rules. Unregistered generators may
+# only be run explicitly and will not be considered by Boost.Build when
+# converting between given target types.
+
+import "class" : new ;
+import errors ;
+import property-set ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+import virtual-target ;
+
+
+if "--debug-generators" in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+
+# Updates cached viable source target type information as needed after a new
+# target type gets defined. This is needed because if a target type is a viable
+# source target type for some generator then all of the target type's derived
+# target types should automatically be considered as viable source target types
+# for the same generator as well. Does nothing if a non-derived target type is
+# passed to it.
+#
+rule update-cached-information-with-a-new-type ( type )
+{
+ local base-type = [ type.base $(type) ] ;
+ if $(base-type)
+ {
+ for local g in $(.vstg-cached-generators)
+ {
+ if $(base-type) in $(.vstg.$(g))
+ {
+ .vstg.$(g) += $(type) ;
+ }
+ }
+
+ for local t in $(.vst-cached-types)
+ {
+ if $(base-type) in $(.vst.$(t))
+ {
+ .vst.$(t) += $(type) ;
+ }
+ }
+ }
+}
+
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+local rule invalidate-extendable-viable-source-target-type-cache ( )
+{
+ local generators-with-cached-source-types = $(.vstg-cached-generators) ;
+ .vstg-cached-generators = ;
+ for local g in $(generators-with-cached-source-types)
+ {
+ if $(.vstg.$(g)) = *
+ {
+ .vstg-cached-generators += $(g) ;
+ }
+ else
+ {
+ .vstg.$(g) = ;
+ }
+ }
+
+ local types-with-cached-source-types = $(.vst-cached-types) ;
+ .vst-cached-types = ;
+ for local t in $(types-with-cached-source-types)
+ {
+ if $(.vst.$(t)) = *
+ {
+ .vst-cached-types += $(t) ;
+ }
+ else
+ {
+ .vst.$(t) = ;
+ }
+ }
+}
+
+
+# Outputs a debug message if generators debugging is on. Each element of
+# 'message' is checked to see if it is a class instance. If so, instead of the
+# value, the result of calling 'str' on it is output.
+#
+local rule generators.dout ( message * )
+{
+ if $(.debug)
+ {
+ ECHO [ sequence.transform utility.str : $(message) ] ;
+ }
+}
+
+
+local rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+local rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+local rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Models a generator.
+#
+class generator
+{
+ import generators : indent increase-indent decrease-indent generators.dout ;
+ import set ;
+ import utility ;
+ import feature ;
+ import errors ;
+ import sequence ;
+ import type ;
+ import virtual-target ;
+ import "class" : new ;
+ import property ;
+ import path ;
+
+ EXPORT class@generator : indent increase-indent decrease-indent
+ generators.dout ;
+
+ rule __init__ (
+ id # Identifies the generator - should be the name
+ # of the rule which sets up the build
+ # actions.
+
+ composing ? # Whether generator processes each source
+ # target in turn, converting it to required
+ # types. Ordinary generators pass all
+ # sources together to the recursive
+ # generators.construct-types call.
+
+ : source-types * # Types that this generator can handle. If
+ # empty, the generator can consume anything.
+
+ : target-types-and-names + # Types the generator will create and,
+ # optionally, names for created targets.
+ # Each element should have the form
+ # type["(" name-pattern ")"], for example,
+ # obj(%_x). Generated target name will be
+ # found by replacing % with the name of
+ # source, provided an explicit name was not
+ # specified.
+
+ : requirements *
+ )
+ {
+ self.id = $(id) ;
+ self.rule-name = $(id) ;
+ self.composing = $(composing) ;
+ self.source-types = $(source-types) ;
+ self.target-types-and-names = $(target-types-and-names) ;
+ self.requirements = $(requirements) ;
+
+ for local e in $(target-types-and-names)
+ {
+ # Create three parallel lists: one with the list of target types,
+ # and two other with prefixes and postfixes to be added to target
+ # name. We use parallel lists for prefix and postfix (as opposed to
+ # mapping), because given target type might occur several times, for
+ # example "H H(%_symbols)".
+ local m = [ MATCH ([^\\(]*)(\\((.*)%(.*)\\))? : $(e) ] ;
+ self.target-types += $(m[1]) ;
+ self.name-prefix += $(m[3]:E="") ;
+ self.name-postfix += $(m[4]:E="") ;
+ }
+
+ # Note that 'transform' here is the same as 'for_each'.
+ sequence.transform type.validate : $(self.source-types) ;
+ sequence.transform type.validate : $(self.target-types) ;
+ }
+
+ ################# End of constructor #################
+
+ rule id ( )
+ {
+ return $(self.id) ;
+ }
+
+ # Returns the list of target types the generator accepts.
+ #
+ rule source-types ( )
+ {
+ return $(self.source-types) ;
+ }
+
+ # Returns the list of target types that this generator produces. It is
+ # assumed to always be the same -- i.e. it cannot change depending on the
+ # provided list of sources.
+ #
+ rule target-types ( )
+ {
+ return $(self.target-types) ;
+ }
+
+ # Returns the required properties for this generator. Properties in the
+ # returned set must be present in the build properties if this generator is to
+ # be used. If the result has a grist-only element, the build properties must
+ # include some value of that feature.
+ #
+ # XXX: remove this method?
+ #
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule set-rule-name ( rule-name )
+ {
+ self.rule-name = $(rule-name) ;
+ }
+
+ rule rule-name ( )
+ {
+ return $(self.rule-name) ;
+ }
+
+ # Returns a true value if the generator can be run with the specified
+ # properties.
+ #
+ rule match-rank ( property-set-to-match )
+ {
+ # See if generator requirements are satisfied by 'properties'. Treat a
+ # feature name in requirements (i.e. grist-only element), as matching
+ # any value of the feature.
+ local all-requirements = [ requirements ] ;
+
+ local property-requirements feature-requirements ;
+ for local r in $(all-requirements)
+ {
+ if $(r:G=)
+ {
+ property-requirements += $(r) ;
+ }
+ else
+ {
+ feature-requirements += $(r) ;
+ }
+ }
+
+ local properties-to-match = [ $(property-set-to-match).raw ] ;
+ if $(property-requirements) in $(properties-to-match) &&
+ $(feature-requirements) in $(properties-to-match:G)
+ {
+ return true ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ # Returns another generator which differs from $(self) in
+ # - id
+ # - the value of the <toolset> feature in its properties
+ #
+ rule clone ( new-id : new-toolset-properties + )
+ {
+ local g = [ new $(__class__) $(new-id) $(self.composing) :
+ $(self.source-types) : $(self.target-types-and-names) :
+ # Note: this does not remove any subfeatures of <toolset> which
+ # might cause problems.
+ [ property.change $(self.requirements) : <toolset> ]
+ $(new-toolset-properties) ] ;
+ return $(g) ;
+ }
+
+ # Creates another generator that is the same as $(self), except that if
+ # 'base' is in the target types of $(self), 'type' will be in the target types
+ # of the new generator.
+ #
+ rule clone-and-change-target-type ( base : type )
+ {
+ local target-types ;
+ for local t in $(self.target-types-and-names)
+ {
+ local m = [ MATCH ([^\\(]*)(\\(.*\\))? : $(t) ] ;
+ if $(m) = $(base)
+ {
+ target-types += $(type)$(m[2]:E="") ;
+ }
+ else
+ {
+ target-types += $(t) ;
+ }
+ }
+
+ local g = [ new $(__class__) $(self.id) $(self.composing) :
+ $(self.source-types) : $(target-types) : $(self.requirements) ] ;
+ if $(self.rule-name)
+ {
+ $(g).set-rule-name $(self.rule-name) ;
+ }
+ return $(g) ;
+ }
+
+ # Tries to invoke this generator on the given sources. Returns a list of
+ # generated targets (instances of 'virtual-target') and optionally a set of
+ # properties to be added to the usage-requirements for all the generated
+ # targets. Returning nothing from run indicates that the generator was
+ # unable to create the target.
+ #
+ rule run
+ (
+ project # Project for which the targets are generated.
+ name ? # Used when determining the 'name' attribute for all
+ # generated targets. See the 'generated-targets' method.
+ : property-set # Desired properties for generated targets.
+ : sources + # Source targets.
+ )
+ {
+ generators.dout [ indent ] " ** generator" $(self.id) ;
+ generators.dout [ indent ] " composing:" $(self.composing) ;
+
+ if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
+ {
+ errors.error "Unsupported source/source-type combination" ;
+ }
+
+ # We do not run composing generators if no name is specified. The reason
+ # is that a composing generator combines several targets, which can have
+ # different names, and it cannot decide which name to give the produced
+ # target. Therefore, the name must be passed.
+ #
+ # This, in effect, means that composing generators are runnable only at
+ # the top level of a transformation graph, or if their name is passed
+ # explicitly. Thus, we disallow composing generators in the middle. For
+ # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
+ # will not be allowed as the OBJ -> STATIC_LIB generator is composing.
+ if ! $(self.composing) || $(name)
+ {
+ run-really $(project) $(name) : $(property-set) : $(sources) ;
+ }
+ }
+
+ rule run-really ( project name ? : property-set : sources + )
+ {
+ # Targets that this generator will consume directly.
+ local consumed = ;
+ # Targets that can not be consumed and will be returned as-is.
+ local bypassed = ;
+
+ if $(self.composing)
+ {
+ convert-multiple-sources-to-consumable-types $(project)
+ : $(property-set) : $(sources) : consumed bypassed ;
+ }
+ else
+ {
+ convert-to-consumable-types $(project) $(name) : $(property-set)
+ : $(sources) : : consumed bypassed ;
+ }
+
+ local result ;
+ if $(consumed)
+ {
+ result = [ construct-result $(consumed) : $(project) $(name) :
+ $(property-set) ] ;
+ }
+
+ if $(result)
+ {
+ generators.dout [ indent ] " SUCCESS: " $(result) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " FAILURE" ;
+ }
+ generators.dout ;
+ return $(result) ;
+ }
+
+ # Constructs the dependency graph to be returned by this generator.
+ #
+ rule construct-result
+ (
+ consumed + # Already prepared list of consumable targets.
+ # Composing generators may receive multiple sources
+ # all of which will have types matching those in
+ # $(self.source-types). Non-composing generators with
+ # multiple $(self.source-types) will receive exactly
+ # len $(self.source-types) sources with types matching
+ # those in $(self.source-types). And non-composing
+ # generators with only a single source type may
+ # receive multiple sources with all of them of the
+ # type listed in $(self.source-types).
+ : project name ?
+ : property-set # Properties to be used for all actions created here.
+ )
+ {
+ local result ;
+ # If this is a 1->1 transformation, apply it to all consumed targets in
+ # order.
+ if ! $(self.source-types[2]) && ! $(self.composing)
+ {
+ for local r in $(consumed)
+ {
+ result += [ generated-targets $(r) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ }
+ else if $(consumed)
+ {
+ result += [ generated-targets $(consumed) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Determines the target name from fullname (possibly including path
+ # components), placing the optional prefix and postfix around the basename.
+ #
+ rule determine-target-name ( fullname : prefix ? : postfix ? )
+ {
+ # See if we need to add directory to the target name.
+ local dir = $(fullname:D) ;
+ local name = $(fullname:B) ;
+
+ name = $(prefix:E=)$(name) ;
+ name = $(name)$(postfix:E=) ;
+
+ if $(dir) &&
+ # Never append '..' to target path.
+ ! [ MATCH .*(\\.\\.).* : $(dir) ]
+ &&
+ ! [ path.is-rooted $(dir) ]
+ {
+ # Relative path is always relative to the source
+ # directory. Retain it, so that users can have files
+ # with the same name in two different subdirectories.
+ name = $(dir)/$(name) ;
+ }
+ return $(name) ;
+ }
+
+ # Determine the name of the produced target from the names of the sources.
+ #
+ rule determine-output-name ( sources + )
+ {
+ # The simple case is when the name of the source has a single dot. Then, we
+ # take the part before the dot. Several dots can be caused by:
+ # - using a source file like a.host.cpp, or
+ # - a type whose suffix has a dot. Say, we can have a type 'host_cpp' with
+ # extension 'host.cpp'.
+ # In the first case, we want to take the part up to the last dot. In the
+ # second case -- not sure, but for now we take the part up to the last dot
+ # too.
+ name = [ utility.basename [ $(sources[1]).name ] ] ;
+
+ for local s in $(sources[2])
+ {
+ local n2 = [ utility.basename [ $(s).name ] ] ;
+ if $(n2) != $(name)
+ {
+ errors.error "$(self.id): source targets have different names: cannot determine target name" ;
+ }
+ }
+ name = [ determine-target-name [ $(sources[1]).name ] ] ;
+ return $(name) ;
+ }
+
+ # Constructs targets that are created after consuming 'sources'. The result
+ # will be the list of virtual-target, which has the same length as the
+ # 'target-types' attribute and with corresponding types.
+ #
+ # When 'name' is empty, all source targets must have the same 'name'
+ # attribute value, which will be used instead of the 'name' argument.
+ #
+ # The 'name' attribute value for each generated target will be equal to
+ # the 'name' parameter if there is no name pattern for this type. Otherwise,
+ # the '%' symbol in the name pattern will be replaced with the 'name'
+ # parameter to obtain the 'name' attribute.
+ #
+ # For example, if target types are T1 and T2 (with name pattern "%_x"),
+ # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created
+ # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually
+ # determines the basename of a file.
+ #
+ # Note that this pattern mechanism has nothing to do with implicit patterns
+ # in make. It is a way to produce a target whose name is different than the
+ # name of its source.
+ #
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if ! $(name)
+ {
+ name = [ determine-output-name $(sources) ] ;
+ }
+
+ # Assign an action for each target.
+ local action = [ action-class ] ;
+ local a = [ class.new $(action) $(sources) : $(self.rule-name) :
+ $(property-set) ] ;
+
+ # Create generated target for each target type.
+ local targets ;
+ local pre = $(self.name-prefix) ;
+ local post = $(self.name-postfix) ;
+ for local t in $(self.target-types)
+ {
+ local generated-name = $(pre[1])$(name:BS)$(post[1]) ;
+ generated-name = $(generated-name:R=$(name:D)) ;
+ pre = $(pre[2-]) ;
+ post = $(post[2-]) ;
+
+ targets += [ class.new file-target $(generated-name) : $(t) :
+ $(project) : $(a) ] ;
+ }
+
+ return [ sequence.transform virtual-target.register : $(targets) ] ;
+ }
+
+ # Attempts to convert 'sources' to targets of types that this generator can
+ # handle. The intention is to produce the set of targets that can be used
+ # when the generator is run.
+ #
+ rule convert-to-consumable-types
+ (
+ project name ?
+ : property-set
+ : sources +
+ : only-one ? # Convert 'source' to only one of the source types. If
+ # there is more than one possibility, report an error.
+ : consumed-var # Name of the variable which receives all targets which
+ # can be consumed.
+ bypassed-var # Name of the variable which receives all targets which
+ # can not be consumed.
+ )
+ {
+ # We are likely to be passed 'consumed' and 'bypassed' var names. Use
+ # '_' to avoid name conflicts.
+ local _consumed ;
+ local _bypassed ;
+ local missing-types ;
+
+ if $(sources[2])
+ {
+ # Do not know how to handle several sources yet. Just try to pass
+ # the request to another generator.
+ missing-types = $(self.source-types) ;
+ }
+ else
+ {
+ consume-directly $(sources) : _consumed : missing-types ;
+ }
+
+ # No need to search for transformation if some source type has consumed
+ # source and no more source types are needed.
+ if $(only-one) && $(_consumed)
+ {
+ missing-types = ;
+ }
+
+ # TODO: we should check that only one source type is created if
+ # 'only-one' is true.
+ # TODO: consider if consumed/bypassed separation should be done by
+ # 'construct-types'.
+
+ if $(missing-types)
+ {
+ local transformed = [ generators.construct-types $(project) $(name)
+ : $(missing-types) : $(property-set) : $(sources) ] ;
+
+ # Add targets of right type to 'consumed'. Add others to 'bypassed'.
+ # The 'generators.construct' rule has done its best to convert
+ # everything to the required type. There is no need to rerun it on
+ # targets of different types.
+
+ # NOTE: ignoring usage requirements.
+ for local t in $(transformed[2-])
+ {
+ if [ $(t).type ] in $(missing-types)
+ {
+ _consumed += $(t) ;
+ }
+ else
+ {
+ _bypassed += $(t) ;
+ }
+ }
+ }
+
+ _consumed = [ sequence.unique $(_consumed) ] ;
+ _bypassed = [ sequence.unique $(_bypassed) ] ;
+
+ # Remove elements of '_bypassed' that are in '_consumed'.
+
+ # Suppose the target type of current generator, X is produced from X_1
+ # and X_2, which are produced from Y by one generator. When creating X_1
+ # from Y, X_2 will be added to 'bypassed'. Likewise, when creating X_2
+ # from Y, X_1 will be added to 'bypassed', but they are also in
+ # 'consumed'. We have to remove them from bypassed, so that generators
+ # up the call stack do not try to convert them.
+
+ # In this particular case, X_1 instance in 'consumed' and X_1 instance
+ # in 'bypassed' will be the same: because they have the same source and
+ # action name, and 'virtual-target.register' will not allow two
+ # different instances. Therefore, it is OK to use 'set.difference'.
+
+ _bypassed = [ set.difference $(_bypassed) : $(_consumed) ] ;
+
+ $(consumed-var) += $(_consumed) ;
+ $(bypassed-var) += $(_bypassed) ;
+ }
+
+ # Converts several files to consumable types. Called for composing
+ # generators only.
+ #
+ rule convert-multiple-sources-to-consumable-types ( project : property-set :
+ sources * : consumed-var bypassed-var )
+ {
+ # We process each source one-by-one, trying to convert it to a usable
+ # type.
+ for local source in $(sources)
+ {
+ local _c ;
+ local _b ;
+ # TODO: need to check for failure on each source.
+ convert-to-consumable-types $(project) : $(property-set) : $(source)
+ : true : _c _b ;
+ if ! $(_c)
+ {
+ generators.dout [ indent ] " failed to convert " $(source) ;
+ }
+ $(consumed-var) += $(_c) ;
+ $(bypassed-var) += $(_b) ;
+ }
+ }
+
+ rule consume-directly ( source : consumed-var : missing-types-var )
+ {
+ local real-source-type = [ $(source).type ] ;
+
+ # If there are no source types, we can consume anything.
+ local source-types = $(self.source-types) ;
+ source-types ?= $(real-source-type) ;
+
+ for local st in $(source-types)
+ {
+ # The 'source' is of the right type already.
+ if $(real-source-type) = $(st) || [ type.is-derived
+ $(real-source-type) $(st) ]
+ {
+ $(consumed-var) += $(source) ;
+ }
+ else
+ {
+ $(missing-types-var) += $(st) ;
+ }
+ }
+ }
+
+ # Returns the class to be used for actions. The default implementation returns
+ # "action".
+ #
+ rule action-class ( )
+ {
+ return "action" ;
+ }
+}
+
+
+# Registers a new generator instance 'g'.
+#
+rule register ( g )
+{
+ .all-generators += $(g) ;
+
+ # A generator can produce several targets of the same type. We want a unique
+ # occurrence of that generator in .generators.$(t) in that case; otherwise, it
+ # will be tried twice and we will get a false ambiguity.
+ for local t in [ sequence.unique [ $(g).target-types ] ]
+ {
+ .generators.$(t) += $(g) ;
+ }
+
+ # Update the set of generators for toolset.
+
+ # TODO: should we check that a generator with this id is not already
+ # registered? For example, the fop.jam module intentionally declares two
+ # generators with the same id, so such a check would break it.
+ local id = [ $(g).id ] ;
+
+ # Some generators have multiple periods in their name, so a simple $(id:S=)
+ # will not generate the right toolset name. E.g. if id = gcc.compile.c++,
+ # then .generators-for-toolset.$(id:S=) will append to
+ # .generators-for-toolset.gcc.compile, which is a separate value from
+ # .generators-for-toolset.gcc. Correcting this makes generator inheritance
+ # work properly. See also inherit-generators in the toolset module.
+ local base = $(id) ;
+ while $(base:S)
+ {
+ base = $(base:B) ;
+ }
+ .generators-for-toolset.$(base) += $(g) ;
+
+
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077
+ # mailing list thread for an even more advanced idea of how we could convert
+ # Boost Build's Jamfile processing, target selection and generator selection
+ # into separate steps which would prevent these caches from ever being
+ # invalidated.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate-extendable-viable-source-target-type-cache ;
+}
+
+
+# Creates a new non-composing 'generator' class instance and registers it.
+# Returns the created instance. Rationale: the instance is returned so that it
+# is possible to first register a generator and then call its 'run' method,
+# bypassing the whole generator selection process.
+#
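+# A typical registration for a hypothetical toolset module might look like
+# (illustrative id, types and requirement only):
+#
+#   generators.register-standard mytool.compile : CPP : OBJ : <toolset>mytool ;
+#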
+rule register-standard ( id : source-types * : target-types + : requirements * )
+{
+ local g = [ new generator $(id) : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
+
+# Creates a new composing 'generator' class instance and registers it.
+#
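+# Illustrative registration of a linker-like generator that consumes several
+# sources at once (hypothetical id, types and requirement):
+#
+#   generators.register-composing mytool.link : OBJ LIB : EXE : <toolset>mytool ;
+#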
+rule register-composing ( id : source-types * : target-types + : requirements *
+ )
+{
+ local g = [ new generator $(id) true : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
+
+# Returns all generators belonging to the given 'toolset', i.e. whose ids are
+# '$(toolset).<something>'.
+#
+rule generators-for-toolset ( toolset )
+{
+ return $(.generators-for-toolset.$(toolset)) ;
+}
+
+
+# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when
+# searching for generators that could produce a target of a certain type, both
+# those generators are among viable generators, the overridden generator is
+# immediately discarded.
+#
+# The overridden generators are discarded immediately after computing the list
+# of viable generators but before running any of them.
+#
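+# Illustrative call (hypothetical generator ids):
+#
+#   generators.override mytool.link-special : mytool.link ;
+#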
+rule override ( overrider-id : overridee-id )
+{
+ .override.$(overrider-id) += $(overridee-id) ;
+}
+
+
+# Returns a list of source types which can possibly be converted to 'target-type'
+# by some chain of generator invocation.
+#
+# More formally, takes all generators for 'target-type' and returns a union of
+# source types for those generators and the result of calling itself recursively
+# on those source types.
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given type.
+#
+local rule viable-source-types-real ( target-type )
+{
+ local result ;
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ local t0 = [ type.all-bases $(target-type) ] ;
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ local t = $(t0) ;
+
+ while $(t)
+ {
+ # Find all generators for the current type. Unlike
+ # 'find-viable-generators' we do not care about the property-set.
+ local generators = $(.generators.$(t[1])) ;
+ t = $(t[2-]) ;
+
+ while $(generators)
+ {
+ local g = $(generators[1]) ;
+ generators = $(generators[2-]) ;
+
+ if ! [ $(g).source-types ]
+ {
+ # Empty source types -- everything can be accepted.
+ result = * ;
+ # This will terminate this loop.
+ generators = ;
+ # This will terminate the outer loop.
+ t = ;
+ }
+
+ for local source-type in [ $(g).source-types ]
+ {
+ if ! $(source-type) in $(result)
+ {
+ # If a generator accepts a 'source-type' it will also
+ # happily accept any type derived from it.
+ for local n in [ type.all-derived $(source-type) ]
+ {
+ if ! $(n) in $(result)
+ {
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if ! $(n) in $(t0)
+ {
+ t += $(n) ;
+ }
+ result += $(n) ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-real'.
+#
+rule viable-source-types ( target-type )
+{
+ local key = .vst.$(target-type) ;
+ if ! $($(key))
+ {
+ .vst-cached-types += $(target-type) ;
+ local v = [ viable-source-types-real $(target-type) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns the list of source types which, when passed to the 'run' method of
+# 'generator', have some chance of being eventually used (probably after
+# conversion by other generators).
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given generator.
+#
+rule viable-source-types-for-generator-real ( generator )
+{
+ local source-types = [ $(generator).source-types ] ;
+ if ! $(source-types)
+ {
+ # If generator does not specify any source types, it might be a special
+ # generator like builtin.lib-generator which just relays to other
+ # generators. Return '*' to indicate that any source type is possibly
+ # OK, since we do not know for sure.
+ return * ;
+ }
+ else
+ {
+ local result ;
+ while $(source-types)
+ {
+ local s = $(source-types[1]) ;
+ source-types = $(source-types[2-]) ;
+ local viable-sources = [ generators.viable-source-types $(s) ] ;
+ if $(viable-sources) = *
+ {
+ result = * ;
+ source-types = ; # Terminate the loop.
+ }
+ else
+ {
+ result += [ type.all-derived $(s) ] $(viable-sources) ;
+ }
+ }
+ return [ sequence.unique $(result) ] ;
+ }
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-for-generator'.
+#
+local rule viable-source-types-for-generator ( generator )
+{
+ local key = .vstg.$(generator) ;
+ if ! $($(key))
+ {
+ .vstg-cached-generators += $(generator) ;
+ local v = [ viable-source-types-for-generator-real $(generator) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns usage requirements + list of created targets.
+#
+local rule try-one-generator-really ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local targets =
+ [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
+
+ local usage-requirements ;
+ local success ;
+
+ generators.dout [ indent ] returned $(targets) ;
+
+ if $(targets)
+ {
+ success = true ;
+
+ if [ class.is-a $(targets[1]) : property-set ]
+ {
+ usage-requirements = $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ }
+
+ generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
+ generators.dout [ indent ] " " $(targets) ;
+ if $(usage-requirements)
+ {
+ generators.dout [ indent ] " with usage requirements:" $(x) ;
+ }
+
+ if $(success)
+ {
+ return $(usage-requirements) $(targets) ;
+ }
+}
+
+
+# Checks if generator invocation can be pruned, because it is guaranteed to
+# fail. If so, quickly returns an empty list. Otherwise, calls
+# try-one-generator-really.
+#
+local rule try-one-generator ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local source-types ;
+ for local s in $(sources)
+ {
+ source-types += [ $(s).type ] ;
+ }
+ local viable-source-types = [ viable-source-types-for-generator $(generator)
+ ] ;
+
+ if $(source-types) && $(viable-source-types) != * &&
+ ! [ set.intersection $(source-types) : $(viable-source-types) ]
+ {
+ local id = [ $(generator).id ] ;
+ generators.dout [ indent ] " ** generator '$(id)' pruned" ;
+ #generators.dout [ indent ] "source-types" '$(source-types)' ;
+ #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
+ }
+ else
+ {
+ return [ try-one-generator-really $(project) $(name) : $(generator) :
+ $(target-type) : $(property-set) : $(sources) ] ;
+ }
+}
+
+
+rule construct-types ( project name ? : target-types + : property-set
+ : sources + )
+{
+ local result ;
+ local matched-types ;
+ local usage-requirements = [ property-set.empty ] ;
+ for local t in $(target-types)
+ {
+ local r = [ construct $(project) $(name) : $(t) : $(property-set) :
+ $(sources) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ matched-types += $(t) ;
+ }
+ }
+ # TODO: have to introduce a parameter controlling whether several types can
+ # be matched and add appropriate checks.
+
+ # TODO: need to review the documentation for 'construct' to see if it should
+ # return $(source) even if nothing can be done with it. Current docs seem
+ # to imply that, contrary to the behaviour.
+ if $(result)
+ {
+ return $(usage-requirements) $(result) ;
+ }
+ else
+ {
+ return $(usage-requirements) $(sources) ;
+ }
+}
+
+
+# Ensures all 'targets' have their type. If this is not so, exits with an error.
+#
+local rule ensure-type ( targets * )
+{
+ for local t in $(targets)
+ {
+ if ! [ $(t).type ]
+ {
+ errors.error "target" [ $(t).str ] "has no type" ;
+ }
+ }
+}
+
+
+# Returns generators which can be used to construct target of specified type
+# with specified properties. Uses the following algorithm:
+# - iterates over requested target-type and all its bases (in the order returned
+# by type.all-bases).
+# - for each type find all generators that generate that type and whose
+# requirements are satisfied by properties.
+# - if the set of generators is not empty, returns that set.
+#
+# Note: this algorithm explicitly ignores generators for base classes if there
+# is at least one generator for the requested target-type.
+#
+local rule find-viable-generators-aux ( target-type : property-set )
+{
+ # Select generators that can create the required target type.
+ local viable-generators = ;
+ local generator-rank = ;
+
+ import type ;
+ local t = [ type.all-bases $(target-type) ] ;
+
+ generators.dout [ indent ] find-viable-generators target-type= $(target-type)
+ property-set= [ $(property-set).as-path ] ;
+
+ # Get the list of generators for the requested type. If no generator is
+ # registered, try base type, and so on.
+ local generators ;
+ while $(t[1])
+ {
+ generators.dout [ indent ] "trying type" $(t[1]) ;
+ if $(.generators.$(t[1]))
+ {
+ generators.dout [ indent ] "there are generators for this type" ;
+ generators = $(.generators.$(t[1])) ;
+
+ if $(t[1]) != $(target-type)
+ {
+ # We are here because there were no generators found for
+ # target-type but there are some generators for its base type.
+ # We will try to use them, but they will produce targets of
+ # base type, not of 'target-type'. So, we clone the generators
+ # and modify the list of target types.
+ local generators2 ;
+ for local g in $(generators)
+ {
+ # generators.register adds a generator to the list of
+ # generators for toolsets, which is a bit strange, but
+ # should work. That list is only used when inheriting a
+ # toolset, which should have been done before running
+ # generators.
+ generators2 += [ $(g).clone-and-change-target-type $(t[1]) :
+ $(target-type) ] ;
+ generators.register $(generators2[-1]) ;
+ }
+ generators = $(generators2) ;
+ }
+ t = ;
+ }
+ t = $(t[2-]) ;
+ }
+
+ for local g in $(generators)
+ {
+ generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
+
+ local m = [ $(g).match-rank $(property-set) ] ;
+ if $(m)
+ {
+ generators.dout [ indent ] " is viable" ;
+ viable-generators += $(g) ;
+ }
+ }
+
+ return $(viable-generators) ;
+}
+
+
+rule find-viable-generators ( target-type : property-set )
+{
+ local key = $(target-type).$(property-set) ;
+ local l = $(.fv.$(key)) ;
+ if ! $(l)
+ {
+ l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ;
+ if ! $(l)
+ {
+ l = none ;
+ }
+ .fv.$(key) = $(l) ;
+ }
+
+ if $(l) = none
+ {
+ l = ;
+ }
+
+ local viable-generators ;
+ for local g in $(l)
+ {
+ # Avoid trying the same generator twice on different levels.
+ if ! $(g) in $(.active-generators)
+ {
+ viable-generators += $(g) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " generator " [ $(g).id ] "is active, discarding" ;
+ }
+ }
+
+ # Generators which override 'all'.
+ local all-overrides ;
+ # Generators which are overridden.
+ local overriden-ids ;
+ for local g in $(viable-generators)
+ {
+ local id = [ $(g).id ] ;
+ local this-overrides = $(.override.$(id)) ;
+ overriden-ids += $(this-overrides) ;
+ if all in $(this-overrides)
+ {
+ all-overrides += $(g) ;
+ }
+ }
+ if $(all-overrides)
+ {
+ viable-generators = $(all-overrides) ;
+ }
+ local result ;
+ for local g in $(viable-generators)
+ {
+ if ! [ $(g).id ] in $(overriden-ids)
+ {
+ result += $(g) ;
+ }
+ }
+
+ return $(result) ;
+}
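+# For illustration only: a hypothetical call to the rule above, assuming 'ps'
+# names an existing property-set instance and the 'generators' module has been
+# imported by the caller:
+#
+#   local viable = [ generators.find-viable-generators EXE : $(ps) ] ;
+#   for local g in $(viable) { ECHO [ $(g).id ] ; }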
+
+
+.construct-stack = ;
+
+
+# Attempts to construct a target by finding viable generators, running them and
+# selecting the dependency graph.
+#
+local rule construct-really ( project name ? : target-type : property-set :
+ sources * )
+{
+ viable-generators = [ find-viable-generators $(target-type) :
+ $(property-set) ] ;
+
+ generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
+ " viable generators" ;
+
+ local result ;
+ local generators-that-succeeded ;
+ for local g in $(viable-generators)
+ {
+ # This variable will be restored on exit from this scope.
+ local .active-generators = $(g) $(.active-generators) ;
+
+ local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type)
+ : $(property-set) : $(sources) ] ;
+
+ if $(r)
+ {
+ generators-that-succeeded += $(g) ;
+ if $(result)
+ {
+ ECHO "Error: ambiguity found when searching for best transformation" ;
+ ECHO "Trying to produce type '$(target-type)' from: " ;
+ for local s in $(sources)
+ {
+ ECHO " - " [ $(s).str ] ;
+ }
+ ECHO "Generators that succeeded:" ;
+ for local g in $(generators-that-succeeded)
+ {
+ ECHO " - " [ $(g).id ] ;
+ }
+ ECHO "First generator produced: " ;
+ for local t in $(result[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ ECHO "Second generator produced: " ;
+ for local t in $(r[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ EXIT ;
+ }
+ else
+ {
+ result = $(r) ;
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Attempts to create a target of 'target-type' with 'properties' from 'sources'.
+# The 'sources' are treated as a collection of *possible* ingredients, i.e.
+# there is no obligation to consume them all.
+#
+# Returns a list of targets. When this invocation is the first instance of
+# 'construct' in the stack, returns only targets of the requested 'target-type';
+# otherwise, it also returns unused sources and additionally generated targets.
+#
+# If 'top-level' is set, does not suppress generators that are already
+# used in the stack. This may be useful in cases where a generator
+# has to build a metatarget -- for example, a target corresponding to a
+# built tool.
+#
+rule construct ( project name ? : target-type : property-set * : sources * : top-level ? )
+{
+ local saved-active ;
+ if $(top-level)
+ {
+ saved-active = $(.active-generators) ;
+ .active-generators = ;
+ }
+
+ if $(.construct-stack)
+ {
+ ensure-type $(sources) ;
+ }
+
+ .construct-stack += 1 ;
+
+ increase-indent ;
+
+ if $(.debug)
+ {
+ generators.dout [ indent ] "*** construct" $(target-type) ;
+
+ for local s in $(sources)
+ {
+ generators.dout [ indent ] " from" $(s) ;
+ }
+ generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
+ }
+
+ local result = [ construct-really $(project) $(name) : $(target-type) :
+ $(property-set) : $(sources) ] ;
+
+ decrease-indent ;
+
+ .construct-stack = $(.construct-stack[2-]) ;
+
+ if $(top-level)
+ {
+ .active-generators = $(saved-active) ;
+ }
+
+ return $(result) ;
+}
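+# Illustrative sketch only (hypothetical names): constructing an EXE named
+# 'hello' from already-created virtual source targets:
+#
+#   local result = [ generators.construct $(project) hello : EXE
+#       : $(property-set) : $(virtual-sources) ] ;
+#
+# The first element of 'result' is typically a property-set with usage
+# requirements; the remaining elements are the constructed virtual targets.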
+
+# Given 'result', obtained from some generator or generators.construct, adds
+# 'raw-properties' as usage requirements to it. If 'result' already contains
+# usage requirements -- that is, the first element of 'result' is an instance
+# of the property-set class -- the existing usage requirements and
+# 'raw-properties' are combined.
+#
+rule add-usage-requirements ( result * : raw-properties * )
+{
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+ }
+ else
+ {
+ return [ property-set.create $(raw-properties) ] $(result) ;
+ }
+ }
+}
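+# For example (hypothetical values), if 'result' does not yet start with a
+# property-set:
+#
+#   result = [ add-usage-requirements $(result) : <include>foo ] ;
+#
+# the returned list is prefixed with a property-set containing <include>foo.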
+
+rule dump ( )
+{
+ for local g in $(.all-generators)
+ {
+ ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
+ }
+}
+
diff --git a/jam-files/boost-build/build/modifiers.jam b/jam-files/boost-build/build/modifiers.jam
new file mode 100644
index 000000000..6b0093433
--- /dev/null
+++ b/jam-files/boost-build/build/modifiers.jam
@@ -0,0 +1,232 @@
+# Copyright 2003 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Modifiers are generalized generators that mutate targets in specific ways.
+# This structure makes it possible to group a variety of functionality
+# orthogonally to the functionality provided by toolsets, without introducing
+# more target variations. In turn, the modifiers can be used as building
+# blocks to implement simple requests, like the <version> feature.
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import property ;
+import virtual-target ;
+import numbers ;
+import sequence ;
+import symlink ;
+import property-set ;
+
+# Base generator for creating targets that are modifications of existing
+# targets.
+#
+class modifier : generator
+{
+ rule __init__ (
+ id
+ composing ?
+ : source-types *
+ : target-types-and-names +
+ : requirements *
+ )
+ {
+ generator.__init__ $(id) $(composing)
+ : $(source-types)
+ : $(target-types-and-names)
+ : $(requirements) ;
+
+ self.targets-in-progress = ;
+ }
+
+ # Wraps the generation of the target to call before and after rules to
+ # affect the real target.
+ #
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result ;
+ local current-target = $(project)^$(name) ;
+ if ! $(current-target) in $(self.targets-in-progress)
+ {
+ # Before modifications...
+ local project_ =
+ [ modify-project-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ local name_ =
+ [ modify-name-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ local property-set_ =
+ [ modify-properties-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ local sources_ =
+ [ modify-sources-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ project = $(project_) ;
+ name = $(name_) ;
+ property-set = $(property-set_) ;
+ sources = $(sources_) ;
+
+ # Generate the real target...
+ local target-type-p =
+ [ property.select <main-target-type> : [ $(property-set).raw ] ] ;
+ self.targets-in-progress += $(current-target) ;
+ result =
+ [ generators.construct $(project) $(name)
+ : $(target-type-p:G=)
+ : $(property-set)
+ : $(sources) ] ;
+ self.targets-in-progress = $(self.targets-in-progress[1--2]) ;
+
+ # After modifications...
+ result =
+ [ modify-target-after $(result)
+ : $(project) $(name)
+ : $(property-set)
+ : $(sources) ] ;
+ }
+ return $(result) ;
+ }
+
+ rule modify-project-before ( project name ? : property-set : sources + )
+ {
+ return $(project) ;
+ }
+
+ rule modify-name-before ( project name ? : property-set : sources + )
+ {
+ return $(name) ;
+ }
+
+ rule modify-properties-before ( project name ? : property-set : sources + )
+ {
+ return $(property-set) ;
+ }
+
+ rule modify-sources-before ( project name ? : property-set : sources + )
+ {
+ return $(sources) ;
+ }
+
+ rule modify-target-after ( target : project name ? : property-set : sources + )
+ {
+ return $(target) ;
+ }
+
+ # Utility rule that clones a file-target, with optional changes to the name,
+ # type and project of the target.
+ # NOTE: This functionality should be moved, and generalized, to
+ # virtual-targets.
+ #
+ rule clone-file-target ( target : new-name ? : new-type ? : new-project ? )
+ {
+ # Need a MUCH better way to clone a target...
+ new-name ?= [ $(target).name ] ;
+ new-type ?= [ $(target).type ] ;
+ new-project ?= [ $(target).project ] ;
+ local result = [ new file-target $(new-name) : $(new-type) : $(new-project) ] ;
+
+ if [ $(target).dependencies ] { $(result).depends [ $(target).dependencies ] ; }
+ $(result).root [ $(target).root ] ;
+ $(result).set-usage-requirements [ $(target).usage-requirements ] ;
+
+ local action = [ $(target).action ] ;
+ local action-class = [ modules.peek $(action) : __class__ ] ;
+
+ local ps = [ $(action).properties ] ;
+ local cloned-action = [ new $(action-class) $(result) :
+ [ $(action).sources ] : [ $(action).action-name ] : $(ps) ] ;
+ $(result).action $(cloned-action) ;
+
+ return $(result) ;
+ }
+}
+
+
+# A modifier that changes the name of a target after it is generated, given a
+# regular expression to split the name and a set of tokens to insert between
+# the split tokens of the name. Optionally, it also exposes the target for
+# other uses through a symlink with the original name.
+#
+class name-modifier : modifier
+{
+ rule __init__ ( )
+ {
+ # Apply ourselves to EXE targets, for now.
+ modifier.__init__ name.modifier : : EXE LIB : <name-modify>yes ;
+ }
+
+ # Modifies the name, by cloning the target with the new name.
+ #
+ rule modify-target-after ( target : project name ? : property-set : sources + )
+ {
+ local result = $(target) ;
+
+ local name-mod-p = [ property.select <name-modifier> : [ $(property-set).raw ] ] ;
+ if $(name-mod-p)
+ {
+ local new-name = [ modify-name [ $(target).name ] : $(name-mod-p:G=) ] ;
+ if $(new-name) != [ $(target).name ]
+ {
+ result = [ clone-file-target $(target) : $(new-name) ] ;
+ }
+ local expose-original-as-symlink = [ MATCH "<symlink>(.*)" : $(name-mod-p) ] ;
+ if $(expose-original-as-symlink)
+ {
+ local symlink-t = [ new symlink-targets $(project) : $(name) : [ $(result).name ] ] ;
+ result = [ $(symlink-t).construct $(result)
+ : [ property-set.create [ $(property-set).raw ] <symlink-location>build-relative ] ] ;
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # Do the transformation of the name.
+ #
+ rule modify-name ( name : modifier-spec + )
+ {
+ local match = [ MATCH "<match>(.*)" : $(modifier-spec) ] ;
+ local name-parts = [ MATCH $(match) : $(name) ] ;
+ local insertions = [ sequence.insertion-sort [ MATCH "(<[0123456789]+>.*)" : $(modifier-spec) ] ] ;
+ local new-name-parts ;
+ local insert-position = 1 ;
+ while $(insertions)
+ {
+ local insertion = [ MATCH "<$(insert-position)>(.*)" : $(insertions[1]) ] ;
+ if $(insertion)
+ {
+ new-name-parts += $(insertion) ;
+ insertions = $(insertions[2-]) ;
+ }
+ new-name-parts += $(name-parts[1]) ;
+ name-parts = $(name-parts[2-]) ;
+ insert-position = [ numbers.increment $(insert-position) ] ;
+ }
+ new-name-parts += $(name-parts) ;
+ return [ sequence.join $(new-name-parts) ] ;
+ }
+
+ rule optional-properties ( )
+ {
+ return <name-modify>yes ;
+ }
+}
+feature.feature name-modifier : : free ;
+feature.feature name-modify : no yes : incidental optional ;
+generators.register [ new name-modifier ] ;
+
+# Translates the <version> property into a set of modification properties
+# that are applied by the name-modifier and symlink-modifier.
+#
+rule version-to-modifier ( property : properties * )
+{
+ return
+ <name-modify>yes
+ <name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.$(property:G=)
+ <name-modifier><symlink>yes
+ ;
+}
+feature.action <version> : version-to-modifier ;
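+# For illustration (hypothetical value): <version>1.2 is translated into
+# roughly the following modification properties, which would cause an EXE
+# named 'prog' to be built as 'prog.1.2' with a 'prog' symlink pointing at it:
+#
+#   <name-modify>yes
+#   <name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.1.2
+#   <name-modifier><symlink>yes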
diff --git a/jam-files/boost-build/build/project.jam b/jam-files/boost-build/build/project.jam
new file mode 100644
index 000000000..c9967613b
--- /dev/null
+++ b/jam-files/boost-build/build/project.jam
@@ -0,0 +1,1110 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented by:
+# - a module where all the Jamfile content lives.
+# - an instance of 'project-attributes' class.
+# (given a module name, can be obtained using the 'attributes' rule)
+# - an instance of 'project-target' class (from targets.jam)
+# (given a module name, can be obtained using the 'target' rule)
+#
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by the rules 'load' and 'initialize', below. First, a module for the
+# Jamfile is loaded and a new project-attributes instance is created. Some rules
+# necessary for the project are added to the module (see the 'project-rules'
+# module) at the bottom of this file. Default project attributes are set
+# (inheriting the attributes of the parent project, if one exists). After that,
+# the Jamfile is read. It can declare its own attributes using the 'project'
+# rule, which will be combined with any attributes already set.
+#
+# The 'project' rule can also declare a project id which will be associated with
+# the project module.
+#
+# There can also be 'standalone' projects. They are created by calling
+# 'initialize' on an arbitrary module and not specifying their location. After
+# the call, the module can call the 'project' rule, declare main targets and
+# behave as a regular project except that, since it is not associated with any
+# location, it should not declare targets that are not prebuilt.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
+
+import "class" : new ;
+import errors ;
+import modules ;
+import path ;
+import print ;
+import property-set ;
+import sequence ;
+
+
+# Loads the Jamfile at the given location. After loading, project global file
+# and Jamfiles needed by the requested one will be loaded recursively. If the
+# Jamfile at that location is loaded already, does nothing. Returns the project
+# module for the Jamfile.
+#
+rule load ( jamfile-location )
+{
+ if --debug-loading in [ modules.peek : ARGV ]
+ {
+ ECHO "Loading Jamfile at" '$(jamfile-location)' ;
+ }
+
+ local module-name = [ module-name $(jamfile-location) ] ;
+ # If Jamfile is already loaded, don't try again.
+ if ! $(module-name) in $(.jamfile-modules)
+ {
+ load-jamfile $(jamfile-location) : $(module-name) ;
+
+ # We want to make sure that child projects are loaded only after their
+ # parent projects. In particular, parent projects define attributes
+ # which are inherited by their children, and we do not want children to be
+ # loaded before the parent has defined everything.
+ #
+ # While "build-project" and "use-project" can potentially refer to child
+ # projects from parent projects, we don't immediately load child
+ # projects when seeing those attributes. Instead, we record the minimal
+ # information to be used only later.
+ load-used-projects $(module-name) ;
+ }
+ return $(module-name) ;
+}
+
+
+rule load-used-projects ( module-name )
+{
+ local used = [ modules.peek $(module-name) : .used-projects ] ;
+ local location = [ attribute $(module-name) location ] ;
+ import project ;
+ while $(used)
+ {
+ local id = $(used[1]) ;
+ local where = $(used[2]) ;
+
+ project.use $(id) : [ path.root [ path.make $(where) ] $(location) ] ;
+ used = $(used[3-]) ;
+ }
+}
+
+
+# Note the use of character groups, as opposed to listing 'Jamroot' and
+# 'jamroot'. With the latter, we would get duplicate matches on Windows and
+# would have to eliminate duplicates.
+JAMROOT ?= [ modules.peek : JAMROOT ] ;
+JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
+
+
+# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
+#
+rule load-parent ( location )
+{
+ local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ;
+
+ if ! $(found)
+ {
+ ECHO error: Could not find parent for project at '$(location)' ;
+ EXIT error: Did not find Jamfile.jam or Jamroot.jam in any parent
+ directory. ;
+ }
+
+ return [ load $(found[1]:D) ] ;
+}
+
+
+# Makes the specified 'module' act as if it were a regularly loaded Jamfile at
+# 'location'. Reports an error if a Jamfile has already been loaded for that
+# location.
+#
+rule act-as-jamfile ( module : location )
+{
+ if [ module-name $(location) ] in $(.jamfile-modules)
+ {
+ errors.error "Jamfile was already loaded for '$(location)'" ;
+ }
+ # Set up non-default mapping from location to module.
+ .module.$(location) = $(module) ;
+
+ # Add the location to the list of project locations so that we don't try to
+ # reload the same Jamfile in the future.
+ .jamfile-modules += [ module-name $(location) ] ;
+
+ initialize $(module) : $(location) ;
+}
+
+
+# Returns the project module corresponding to the given project-id or plain
+# directory name. Returns nothing if such a project can not be found.
+#
+rule find ( name : current-location )
+{
+ local project-module ;
+
+ # Try interpreting name as project id.
+ if [ path.is-rooted $(name) ]
+ {
+ project-module = $($(name).jamfile-module) ;
+ }
+
+ if ! $(project-module)
+ {
+ local location = [ path.root [ path.make $(name) ] $(current-location) ]
+ ;
+
+ # If no project is registered for the given location, try to load it.
+ # First see if we have a Jamfile. If not, then see if we might have a
+ # project root willing to act as a Jamfile. In that case, the project root
+ # must be placed in the directory referred to by the id.
+
+ project-module = [ module-name $(location) ] ;
+ if ! $(project-module) in $(.jamfile-modules)
+ {
+ if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
+ {
+ project-module = [ load $(location) ] ;
+ }
+ else
+ {
+ project-module = ;
+ }
+ }
+ }
+
+ return $(project-module) ;
+}
+
+
+# Returns the name of the module corresponding to 'jamfile-location'. If no
+# module corresponds to that location yet, associates the default module name
+# with that location.
+#
+rule module-name ( jamfile-location )
+{
+ if ! $(.module.$(jamfile-location))
+ {
+ # Root the path, so that locations are always unambiguous. Without this,
+ # we can't decide if '../../exe/program1' and '.' are the same paths.
+ jamfile-location = [ path.root $(jamfile-location) [ path.pwd ] ] ;
+ .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ;
+ }
+ return $(.module.$(jamfile-location)) ;
+}
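+# For example (hypothetical location): calling module-name on /home/user/src
+# would map that location to the module name Jamfile</home/user/src>.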
+
+
+# Default patterns to search for the Jamfiles to use for build declarations.
+#
+JAMFILE = [ modules.peek : JAMFILE ] ;
+JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
+
+
+# Find the Jamfile at the given location. This returns the exact names of all
+# the Jamfiles in the given directory. The optional parent-root argument causes
+# this to search not the given directory but the ones above it up to the
+# directory given in it.
+#
+rule find-jamfile (
+ dir # The directory (or directories) in which to look for a Jamfile.
+ parent-root ? # Optional flag indicating to search for the parent Jamfile.
+ : no-errors ?
+ )
+{
+ # Glob for all the possible Jamfiles according to the match pattern.
+ #
+ local jamfile-glob = ;
+ if $(parent-root)
+ {
+ if ! $(.parent-jamfile.$(dir))
+ {
+ .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE)
+ ] ;
+ }
+ jamfile-glob = $(.parent-jamfile.$(dir)) ;
+ }
+ else
+ {
+ if ! $(.jamfile.$(dir))
+ {
+ .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
+ }
+ jamfile-glob = $(.jamfile.$(dir)) ;
+
+ }
+
+ local jamfile-to-load = $(jamfile-glob) ;
+ # Multiple Jamfiles found in the same place. Warn about this and ensure we
+ # use only one of them. As a temporary convenience measure, if there is
+ # Jamfile.v2 among found files, suppress the warning and use it.
+ #
+ if $(jamfile-to-load[2-])
+ {
+ local v2-jamfiles = [ MATCH (.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam) : $(jamfile-to-load) ] ;
+
+ if $(v2-jamfiles) && ! $(v2-jamfiles[2])
+ {
+ jamfile-to-load = $(v2-jamfiles) ;
+ }
+ else
+ {
+ local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
+ ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
+ "Loading the first one: '$(jamfile)'." ;
+ }
+
+ jamfile-to-load = $(jamfile-to-load[1]) ;
+ }
+
+ # Could not find it, error.
+ #
+ if ! $(no-errors) && ! $(jamfile-to-load)
+ {
+ errors.error Unable to load Jamfile.
+ : Could not find a Jamfile in directory '$(dir)'.
+ : Attempted to find it with pattern '"$(JAMFILE:J=" ")"'.
+ : Please consult the documentation at 'http://www.boost.org'. ;
+ }
+
+ return $(jamfile-to-load) ;
+}
+
+
+# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
+# the file as indicated by the JAMFILE patterns. Effect of calling this rule
+# twice with the same 'dir' is undefined.
+#
+local rule load-jamfile (
+ dir # The directory of the project Jamfile.
+ : jamfile-module
+ )
+{
+ # See if the Jamfile is where it should be.
+ #
+ local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ;
+ if ! $(jamfile-to-load)
+ {
+ jamfile-to-load = [ find-jamfile $(dir) ] ;
+ }
+
+ if $(jamfile-to-load[2])
+ {
+ errors.error "Multiple Jamfiles found at '$(dir)'"
+ : "Filenames are: " $(jamfile-to-load:D=) ;
+ }
+
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load the parent Jamfile, which might have a
+ # 'use-project' statement that causes a second attempt to load the
+ # same project we are loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing things up.
+ if ! $(jamfile-module) in $(.jamfile-modules)
+ {
+ .jamfile-modules += $(jamfile-module) ;
+
+ # Initialize the Jamfile module before loading.
+ #
+ initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
+ : $(jamfile-to-load:BS) ;
+
+ local saved-project = $(.current-project) ;
+
+ mark-as-user $(jamfile-module) ;
+ modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
+ if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ]
+ {
+ jamfile = [ find-jamfile $(dir) : no-errors ] ;
+ if $(jamfile)
+ {
+ load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
+ }
+ }
+
+ # Now do some checks.
+ if $(.current-project) != $(saved-project)
+ {
+ errors.error "The value of the .current-project variable has magically"
+ : "changed after loading a Jamfile. This means some of the targets"
+ : "might be defined in the wrong project."
+ : "after loading" $(jamfile-module)
+ : "expected value" $(saved-project)
+ : "actual value" $(.current-project) ;
+ }
+
+ if $(.global-build-dir)
+ {
+ local id = [ attribute $(jamfile-module) id ] ;
+ local project-root = [ attribute $(jamfile-module) project-root ] ;
+ local location = [ attribute $(jamfile-module) location ] ;
+
+ if $(location) && $(project-root) = $(dir)
+ {
+ # This is Jamroot.
+ if ! $(id)
+ {
+ ECHO "warning: the --build-dir option was specified" ;
+ ECHO "warning: but Jamroot at '$(dir)'" ;
+ ECHO "warning: specified no project id" ;
+ ECHO "warning: the --build-dir option will be ignored" ;
+ }
+ }
+ }
+ }
+}
+
+
+rule mark-as-user ( module-name )
+{
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+rule load-aux ( module-name : file )
+{
+ mark-as-user $(module-name) ;
+
+ module $(module-name)
+ {
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+}
+
+
+.global-build-dir = [ MATCH --build-dir=(.*) : [ modules.peek : ARGV ] ] ;
+if $(.global-build-dir)
+{
+ # If the option is specified several times, take the last value.
+ .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
+}
+
+
+# Initialize the module for a project.
+#
+rule initialize (
+ module-name # The name of the project module.
+ : location ? # The location (directory) of the project to initialize. If
+ # not specified, a standalone project will be initialized.
+ : basename ?
+ )
+{
+ if --debug-loading in [ modules.peek : ARGV ]
+ {
+ ECHO "Initializing project '$(module-name)'" ;
+ }
+
+ # TODO: need to consider if standalone projects can do anything but define
+ # prebuilt targets. If so, we need to give them a more sensible "location",
+ # so that source paths are correct.
+ location ?= "" ;
+ # Create the module for the Jamfile first.
+ module $(module-name)
+ {
+ }
+ $(module-name).attributes = [ new project-attributes $(location)
+ $(module-name) ] ;
+ local attributes = $($(module-name).attributes) ;
+
+ if $(location)
+ {
+ $(attributes).set source-location : [ path.make $(location) ] : exact ;
+ }
+ else if ! $(module-name) in test-config site-config user-config project-config
+ {
+ # This is a standalone project with known location. Set source location
+ # so that it can declare targets. This is intended so that you can put
+ # a .jam file in your sources and use it via 'using'. Standard modules
+ # (in 'tools' subdir) may not assume source dir is set.
+ local s = [ modules.binding $(module-name) ] ;
+ if ! $(s)
+ {
+ errors.error "Could not determine project location $(module-name)" ;
+ }
+ $(attributes).set source-location : $(s:D) : exact ;
+ }
+
+ $(attributes).set requirements : [ property-set.empty ] : exact ;
+ $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
+
+ # Import rules common to all project modules from project-rules module,
+ # defined at the end of this file.
+ local rules = [ RULENAMES project-rules ] ;
+ IMPORT project-rules : $(rules) : $(module-name) : $(rules) ;
+
+ local jamroot ;
+
+ local parent-module ;
+ if $(module-name) = test-config
+ {
+ # No parent.
+ }
+ else if $(module-name) = site-config
+ {
+ parent-module = test-config ;
+ }
+ else if $(module-name) = user-config
+ {
+ parent-module = site-config ;
+ }
+ else if $(module-name) = project-config
+ {
+ parent-module = user-config ;
+ }
+ else
+ {
+ # We search for parent/project-root only if Jamfile was specified, i.e.
+ # if the project is not standalone.
+ if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ]
+ {
+ parent-module = [ load-parent $(location) ] ;
+ }
+ else
+ {
+ # It is either a Jamroot or a standalone project. If it is a Jamroot,
+ # inherit from user-config.
+ if $(location)
+ {
+ # If the project-config module exists, inherit from it.
+ if $(project-config.attributes)
+ {
+ parent-module = project-config ;
+ }
+ else
+ {
+ parent-module = user-config ;
+ }
+ jamroot = true ;
+ }
+ }
+ }
+
+ if $(parent-module)
+ {
+ inherit-attributes $(module-name) : $(parent-module) ;
+ $(attributes).set parent-module : $(parent-module) : exact ;
+ }
+
+ if $(jamroot)
+ {
+ $(attributes).set project-root : $(location) : exact ;
+ }
+
+ local parent ;
+ if $(parent-module)
+ {
+ parent = [ target $(parent-module) ] ;
+ }
+
+ if ! $(.target.$(module-name))
+ {
+ .target.$(module-name) = [ new project-target $(module-name)
+ : $(module-name) $(parent)
+ : [ attribute $(module-name) requirements ] ] ;
+
+ if --debug-loading in [ modules.peek : ARGV ]
+ {
+ ECHO "Assigned project target" $(.target.$(module-name))
+ "to '$(module-name)'" ;
+ }
+ }
+
+ .current-project = [ target $(module-name) ] ;
+}
+
+
+# Make 'project-module' inherit attributes of project root and parent module.
+#
+rule inherit-attributes ( project-module : parent-module )
+{
+ local attributes = $($(project-module).attributes) ;
+ local pattributes = [ attributes $(parent-module) ] ;
+ # Parent module might be locationless configuration module.
+ if [ modules.binding $(parent-module) ]
+ {
+ $(attributes).set parent : [ path.parent
+ [ path.make [ modules.binding $(parent-module) ] ] ] ;
+ }
+ local v = [ $(pattributes).get project-root ] ;
+ $(attributes).set project-root : $(v) : exact ;
+ $(attributes).set default-build
+ : [ $(pattributes).get default-build ] ;
+ $(attributes).set requirements
+ : [ $(pattributes).get requirements ] : exact ;
+ $(attributes).set usage-requirements
+ : [ $(pattributes).get usage-requirements ] : exact ;
+
+ local parent-build-dir = [ $(pattributes).get build-dir ] ;
+ if $(parent-build-dir)
+ {
+ # Have to compute relative path from parent dir to our dir. Convert both
+ # paths to absolute, since we cannot find relative path from ".." to
+ # ".".
+
+ local location = [ attribute $(project-module) location ] ;
+ local parent-location = [ attribute $(parent-module) location ] ;
+
+ local pwd = [ path.pwd ] ;
+ local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
+ local our-dir = [ path.root $(location) $(pwd) ] ;
+ $(attributes).set build-dir : [ path.join $(parent-build-dir)
+ [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
+ }
+}
+
+
+# Associate the given id with the given project module.
+#
+rule register-id ( id : module )
+{
+ $(id).jamfile-module = $(module) ;
+}
+
+
+# Class keeping all the attributes of a project.
+#
+# The standard attributes are "id", "location", "project-root", "parent",
+# "requirements", "default-build", "source-location" and "projects-to-build".
+#
+class project-attributes
+{
+ import property ;
+ import property-set ;
+ import errors ;
+ import path ;
+ import print ;
+ import sequence ;
+ import project ;
+
+ rule __init__ ( location project-module )
+ {
+ self.location = $(location) ;
+ self.project-module = $(project-module) ;
+ }
+
+ # Set the named attribute from the specification given by the user. The
+ # value actually set may be different.
+ #
+ rule set ( attribute : specification *
+ : exact ? # Sets value from 'specification' without any processing.
+ )
+ {
+ if $(exact)
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ else if $(attribute) = "requirements"
+ {
+ local result = [ property-set.refine-from-user-input
+ $(self.requirements) : $(specification)
+ : $(self.project-module) : $(self.location) ] ;
+
+ if $(result[1]) = "@error"
+ {
+ errors.error Requirements for project at '$(self.location)'
+ conflict with parent's. : Explanation: $(result[2-]) ;
+ }
+ else
+ {
+ self.requirements = $(result) ;
+ }
+ }
+ else if $(attribute) = "usage-requirements"
+ {
+ local unconditional ;
+ for local p in $(specification)
+ {
+ local split = [ property.split-conditional $(p) ] ;
+ split ?= nothing $(p) ;
+ unconditional += $(split[2]) ;
+ }
+
+ local non-free = [ property.remove free : $(unconditional) ] ;
+ if $(non-free)
+ {
+ errors.error usage-requirements $(specification) have non-free
+ properties $(non-free) ;
+ }
+ local t = [ property.translate-paths $(specification)
+ : $(self.location) ] ;
+ if $(self.usage-requirements)
+ {
+ self.usage-requirements = [ property-set.create
+ [ $(self.usage-requirements).raw ] $(t) ] ;
+ }
+ else
+ {
+ self.usage-requirements = [ property-set.create $(t) ] ;
+ }
+ }
+ else if $(attribute) = "default-build"
+ {
+ self.default-build = [ property.make $(specification) ] ;
+ }
+ else if $(attribute) = "source-location"
+ {
+ self.source-location = ;
+ for local src-path in $(specification)
+ {
+ self.source-location += [ path.root [ path.make $(src-path) ]
+ $(self.location) ] ;
+ }
+ }
+ else if $(attribute) = "build-dir"
+ {
+ self.build-dir = [ path.root
+ [ path.make $(specification) ] $(self.location) ] ;
+ }
+ else if $(attribute) = "id"
+ {
+ id = [ path.root $(specification) / ] ;
+ project.register-id $(id) : $(self.project-module) ;
+ self.id = $(id) ;
+ }
+ else if ! $(attribute) in "default-build" "location" "parent"
+ "projects-to-build" "project-root" "source-location"
+ {
+ errors.error Invalid project attribute '$(attribute)' specified for
+ project at '$(self.location)' ;
+ }
+ else
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ }
+
+ # Returns the value of the given attribute.
+ #
+ rule get ( attribute )
+ {
+ return $(self.$(attribute)) ;
+ }
+
+ # Prints the project attributes.
+ #
+ rule print ( )
+ {
+ local id = $(self.id) ; id ?= (none) ;
+ local parent = $(self.parent) ; parent ?= (none) ;
+ print.section "'"$(id)"'" ;
+ print.list-start ;
+ print.list-item "Parent project:" $(parent) ;
+ print.list-item "Requirements:" [ $(self.requirements).raw ] ;
+ print.list-item "Default build:" $(self.default-build) ;
+ print.list-item "Source location:" $(self.source-location) ;
+ print.list-item "Projects to build:"
+ [ sequence.insertion-sort $(self.projects-to-build) ] ;
+ print.list-end ;
+ }
+}
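+# Illustrative sketch only (hypothetical module name 'm'): reading and setting
+# attributes of a loaded project module from other build-system code:
+#
+#   local attrs = [ project.attributes $(m) ] ;
+#   $(attrs).set build-dir : bin.v2 ;
+#   local reqs = [ project.attribute $(m) requirements ] ;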
+
+
+# Returns the project which is currently being loaded.
+#
+rule current ( )
+{
+ return $(.current-project) ;
+}
+
+
+# Temporarily changes the current project to 'project'. Should be followed by
+# 'pop-current'.
+#
+rule push-current ( project )
+{
+ .saved-current-project += $(.current-project) ;
+ .current-project = $(project) ;
+}
+
+
+rule pop-current ( )
+{
+ .current-project = $(.saved-current-project[-1]) ;
+ .saved-current-project = $(.saved-current-project[1--2]) ;
+}
+
+
+# Returns the project-attributes instance for the specified Jamfile module.
+#
+rule attributes ( project )
+{
+ return $($(project).attributes) ;
+}
+
+
+# Returns the value of the specified attribute in the specified Jamfile module.
+#
+rule attribute ( project attribute )
+{
+ return [ $($(project).attributes).get $(attribute) ] ;
+}
+
+
+# Returns the project target corresponding to the 'project-module'.
+#
+rule target ( project-module )
+{
+ if ! $(.target.$(project-module))
+ {
+ .target.$(project-module) = [ new project-target $(project-module)
+ : $(project-module)
+ : [ attribute $(project-module) requirements ] ] ;
+ }
+ return $(.target.$(project-module)) ;
+}
+
+
+# Use/load a project.
+#
+rule use ( id : location )
+{
+ local saved-project = $(.current-project) ;
+ local project-module = [ project.load $(location) ] ;
+ local declared-id = [ project.attribute $(project-module) id ] ;
+
+ if ! $(declared-id) || $(declared-id) != $(id)
+ {
+ # The project at 'location' either has no id or that id is not equal to
+ # the 'id' parameter.
+ if $($(id).jamfile-module) && ( $($(id).jamfile-module) !=
+ $(project-module) )
+ {
+ errors.user-error Attempt to redeclare already existing project id
+ '$(id)'
+ location '$(location)' ;
+ }
+ $(id).jamfile-module = $(project-module) ;
+ }
+ .current-project = $(saved-project) ;
+}
+
+
+# Defines a Boost.Build extension project. Such extensions usually contain
+# library targets and features that can be used by many people. Even though
+# extensions are really projects, they can be initialized as a module would be
+# with the "using" (project.project-rules.using) mechanism.
+#
+rule extension ( id : options * : * )
+{
+ # The caller is a standalone module for the extension.
+ local mod = [ CALLER_MODULE ] ;
+
+ # We need to do the rest within the extension module.
+ module $(mod)
+ {
+ import path ;
+
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+
+ # Create the project data, and bring in the project rules into the
+ # module.
+ project.initialize $(__name__) : [ path.join [ project.attribute
+ $(root-project) location ] ext $(1:L) ] ;
+
+ # Create the project itself, i.e. the attributes. All extensions are
+ # created in the "/ext" project space.
+ project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) ;
+ local attributes = [ project.attributes $(__name__) ] ;
+
+ # Inherit from the root project of whoever is defining us.
+ project.inherit-attributes $(__name__) : $(root-project) ;
+ $(attributes).set parent-module : $(root-project) : exact ;
+ }
+}
+
+
+rule glob-internal ( project : wildcards + : excludes * : rule-name )
+{
+ local location = [ $(project).get source-location ] ;
+
+ local result ;
+ local paths = [ path.$(rule-name) $(location) :
+ [ sequence.transform path.make : $(wildcards) ] :
+ [ sequence.transform path.make : $(excludes) ] ] ;
+ if $(wildcards:D) || $(rule-name) != glob
+ {
+ # The paths we have found are relative to the current directory, but the
+ # names specified in the sources list are assumed to be relative to the
+ # source directory of the corresponding project. So, just make the names
+ # absolute.
+ for local p in $(paths)
+ {
+ # If the path is below source location, use relative path.
+ # Otherwise, use full path just to avoid any ambiguities.
+ local rel = [ path.relative $(p) $(location) : no-error ] ;
+ if $(rel) = not-a-child
+ {
+ result += [ path.root $(p) [ path.pwd ] ] ;
+ }
+ else
+ {
+ result += $(rel) ;
+ }
+ }
+ }
+ else
+ {
+ # There were no wildcards in the directory path, so the files are all in
+ # the source directory of the project. Just drop the directory, instead
+ # of making paths absolute.
+ result = $(paths:D="") ;
+ }
+
+ return $(result) ;
+}
+
+
+# This module defines rules common to all projects.
+#
+module project-rules
+{
+ rule using ( toolset-module : * )
+ {
+ import toolset ;
+ import modules ;
+ import project ;
+
+ # Temporarily change the search path so the module referred to by
+ # 'using' can be placed in the same directory as the Jamfile. The user
+ # will expect the module to be found even though the directory is not in
+ # BOOST_BUILD_PATH.
+ local x = [ modules.peek : BOOST_BUILD_PATH ] ;
+ local caller = [ CALLER_MODULE ] ;
+ local caller-location = [ modules.binding $(caller) ] ;
+ modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ;
+ toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ modules.poke : BOOST_BUILD_PATH : $(x) ;
+
+ # The above might have clobbered .current-project. Restore the correct
+ # value.
+ modules.poke project : .current-project
+ : [ project.target $(caller) ] ;
+ }
+
+ import modules ;
+
+ rule import ( * : * : * )
+ {
+ modules.import project ;
+
+ local caller = [ CALLER_MODULE ] ;
+ local saved = [ modules.peek project : .current-project ] ;
+ module $(caller)
+ {
+ modules.import $(1) : $(2) : $(3) ;
+ }
+ modules.poke project : .current-project : $(saved) ;
+ }
+
+ rule project ( id ? : options * : * )
+ {
+ import errors ;
+ import path ;
+ import project ;
+
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+ if $(id)
+ {
+ $(attributes).set id : $(id) ;
+ }
+
+ local explicit-build-dir ;
+
+ for n in 2 3 4 5 6 7 8 9
+ {
+ local option = $($(n)) ;
+ if $(option)
+ {
+ $(attributes).set $(option[1]) : $(option[2-]) ;
+ }
+ if $(option[1]) = "build-dir"
+ {
+ explicit-build-dir = [ path.make $(option[2-]) ] ;
+ }
+ }
+
+ # If '--build-dir' is specified, change the build dir for the project.
+ local global-build-dir =
+ [ modules.peek project : .global-build-dir ] ;
+
+ if $(global-build-dir)
+ {
+ local location = [ $(attributes).get location ] ;
+ # A project with an empty location is a 'standalone' project, such as
+ # user-config or qt. It has no build dir. If we tried to set a build dir
+ # for user-config, we would then try to inherit it, with either weird
+ # or wrong consequences.
+ if $(location) && $(location) = [ $(attributes).get project-root ]
+ {
+ # Re-read the project id, since it might have been changed in
+ # the project's attributes.
+ id = [ $(attributes).get id ] ;
+ # This is Jamroot.
+ if $(id)
+ {
+ if $(explicit-build-dir) &&
+ [ path.is-rooted $(explicit-build-dir) ]
+ {
+ errors.user-error Absolute directory specified via
+ 'build-dir' project attribute : Do not know how to
+ combine that with the --build-dir option. ;
+ }
+ # Strip the leading slash from id.
+ local rid = [ MATCH /(.*) : $(id) ] ;
+ local p = [ path.join
+ $(global-build-dir) $(rid) $(explicit-build-dir) ] ;
+
+ $(attributes).set build-dir : $(p) : exact ;
+ }
+ }
+ else
+ {
+ # Not Jamroot.
+ if $(explicit-build-dir)
+ {
+ errors.user-error When --build-dir is specified, the
+ 'build-dir' project : attribute is allowed only for
+ top-level 'project' invocations ;
+ }
+ }
+ }
+ }
+
+ # Declare and set a project global constant. Project global constants are
+ # normal variables but should not be changed. They are applied to every
+ # child Jamfile.
+ #
+ rule constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) ;
+ }
+
+ # Declare and set a project global constant, whose value is a path. The path
+ # is adjusted to be relative to the invocation directory. The given value
+ # path is taken to be either absolute, or relative to this project root.
+ #
+ rule path-constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) : path ;
+ }
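+ # For example, in a Jamfile (illustrative values):
+ #
+ # constant VERSION : 1.0 ;
+ # path-constant TOP : . ;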
+
+ rule use-project ( id : where )
+ {
+ import modules ;
+ # See comment in 'load' for explanation.
+ local caller = [ CALLER_MODULE ] ;
+ modules.poke $(caller) : .used-projects :
+ [ modules.peek $(caller) : .used-projects ]
+ $(id) $(where) ;
+ }
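+ # Typical Jamroot usage (illustrative id and path):
+ #
+ # use-project /zlib : 3rd-party/zlib ;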
+
+ rule build-project ( dir )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+
+ local now = [ $(attributes).get projects-to-build ] ;
+ $(attributes).set projects-to-build : $(now) $(dir) ;
+ }
+
+ rule explicit ( target-names * )
+ {
+ import project ;
+ # If 'explicit' is used in a helper rule defined in Jamroot and
+ # inherited by children, then most of the time we want 'explicit' to
+ # operate on the Jamfile where the helper rule is invoked.
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-explicit $(n) ;
+ }
+ }
+
+ rule always ( target-names * )
+ {
+ import project ;
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-always $(n) ;
+ }
+ }
+
+ rule glob ( wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob ] ;
+ }
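+ # For example, in a Jamfile (illustrative; 'exe' is the usual builtin
+ # main target rule):
+ #
+ # exe app : [ glob *.cpp : main-test.cpp ] ;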
+
+ rule glob-tree ( wildcards + : excludes * )
+ {
+ import project ;
+
+ if $(wildcards:D) || $(excludes:D)
+ {
+ errors.user-error The patterns to 'glob-tree' may not include
+ directory ;
+ }
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob-tree ] ;
+ }
+
+ # Calculates conditional requirements for multiple requirements at once.
+ # This is a shorthand to reduce duplication and to keep an inline
+ # declarative syntax. For example:
+ #
+ # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
+ # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
+ #
+ rule conditional ( condition + : requirements * )
+ {
+ local condition = $(condition:J=,) ;
+ if [ MATCH (:) : $(condition) ]
+ {
+ return $(condition)$(requirements) ;
+ }
+ else
+ {
+ return $(condition):$(requirements) ;
+ }
+ }
+
+ rule option ( name : value )
+ {
+ if $(__name__) != site-config && $(__name__) != user-config && $(__name__) != project-config
+ {
+ import errors ;
+ errors.error "The 'option' rule may be used only in site-config or user-config" ;
+ }
+ import option ;
+ option.set $(name) : $(value) ;
+ }
+}
diff --git a/jam-files/boost-build/build/property-set.jam b/jam-files/boost-build/build/property-set.jam
new file mode 100644
index 000000000..70fd90cde
--- /dev/null
+++ b/jam-files/boost-build/build/property-set.jam
@@ -0,0 +1,481 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import feature ;
+import path ;
+import project ;
+import property ;
+import sequence ;
+import set ;
+import option ;
+
+# Class for storing a set of properties.
+#
+# There is a one-to-one correspondence between identity and value: no two
+# distinct instances of the class hold the same set of properties. To maintain
+# this property, the 'property-set.create' rule should be used to create new
+# instances. Instances are immutable.
+#
+# Each property is classified with regard to its effect on build results.
+# Incidental properties have no effect on build results, from Boost.Build's
+# point of view. Others are either free or non-free, and we refer to the
+# non-free ones as 'base'. Each property belongs to exactly one of those
+# categories.
+#
+# It is possible to get a list of properties belonging to each category as
+# well as a list of properties with a specific attribute.
+#
+# Several operations, like 'add', 'refine' and 'as-path', are provided. They
+# all use caching whenever possible.
+#
+class property-set
+{
+ import errors ;
+ import feature ;
+ import path ;
+ import property ;
+ import property-set ;
+ import set ;
+
+ rule __init__ ( raw-properties * )
+ {
+ self.raw = $(raw-properties) ;
+
+ for local p in $(raw-properties)
+ {
+ if ! $(p:G)
+ {
+ errors.error "Invalid property: '$(p)'" ;
+ }
+
+ local att = [ feature.attributes $(p:G) ] ;
+ # A feature can be both incidental and free, in which case we add it
+ # to incidental.
+ if incidental in $(att)
+ {
+ self.incidental += $(p) ;
+ }
+ else if free in $(att)
+ {
+ self.free += $(p) ;
+ }
+ else
+ {
+ self.base += $(p) ;
+ }
+
+ if dependency in $(att)
+ {
+ self.dependency += $(p) ;
+ }
+ else
+ {
+ self.non-dependency += $(p) ;
+ }
+
+ if [ MATCH (:) : $(p:G=) ]
+ {
+ self.conditional += $(p) ;
+ }
+ else
+ {
+ self.non-conditional += $(p) ;
+ }
+
+ if propagated in $(att)
+ {
+ self.propagated += $(p) ;
+ }
+ if link-incompatible in $(att)
+ {
+ self.link-incompatible += $(p) ;
+ }
+ }
+ }
+
+ # Returns Jam list of stored properties.
+ #
+ rule raw ( )
+ {
+ return $(self.raw) ;
+ }
+
+ rule str ( )
+ {
+ return "[" $(self.raw) "]" ;
+ }
+
+ # Returns properties that are neither incidental nor free.
+ #
+ rule base ( )
+ {
+ return $(self.base) ;
+ }
+
+ # Returns free properties which are not incidental.
+ #
+ rule free ( )
+ {
+ return $(self.free) ;
+ }
+
+ # Returns dependency properties.
+ #
+ rule dependency ( )
+ {
+ return $(self.dependency) ;
+ }
+
+ rule non-dependency ( )
+ {
+ return $(self.non-dependency) ;
+ }
+
+ rule conditional ( )
+ {
+ return $(self.conditional) ;
+ }
+
+ rule non-conditional ( )
+ {
+ return $(self.non-conditional) ;
+ }
+
+ # Returns incidental properties.
+ #
+ rule incidental ( )
+ {
+ return $(self.incidental) ;
+ }
+
+ rule refine ( ps )
+ {
+ if ! $(self.refined.$(ps))
+ {
+ local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
+ if $(r[1]) != "@error"
+ {
+ self.refined.$(ps) = [ property-set.create $(r) ] ;
+ }
+ else
+ {
+ self.refined.$(ps) = $(r) ;
+ }
+ }
+ return $(self.refined.$(ps)) ;
+ }
+
+ rule expand ( )
+ {
+ if ! $(self.expanded)
+ {
+ self.expanded = [ property-set.create [ feature.expand $(self.raw) ] ] ;
+ }
+ return $(self.expanded) ;
+ }
+
+ rule expand-composites ( )
+ {
+ if ! $(self.composites)
+ {
+ self.composites = [ property-set.create
+ [ feature.expand-composites $(self.raw) ] ] ;
+ }
+ return $(self.composites) ;
+ }
+
+ rule evaluate-conditionals ( context ? )
+ {
+ context ?= $(__name__) ;
+ if ! $(self.evaluated.$(context))
+ {
+ self.evaluated.$(context) = [ property-set.create
+ [ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ;
+ }
+ return $(self.evaluated.$(context)) ;
+ }
+
+ rule propagated ( )
+ {
+ if ! $(self.propagated-ps)
+ {
+ self.propagated-ps = [ property-set.create $(self.propagated) ] ;
+ }
+ return $(self.propagated-ps) ;
+ }
+
+ rule link-incompatible ( )
+ {
+ if ! $(self.link-incompatible-ps)
+ {
+ self.link-incompatible-ps =
+ [ property-set.create $(self.link-incompatible) ] ;
+ }
+ return $(self.link-incompatible-ps) ;
+ }
+
+ rule run-actions ( )
+ {
+ if ! $(self.run)
+ {
+ self.run = [ property-set.create [ feature.run-actions $(self.raw) ] ] ;
+ }
+ return $(self.run) ;
+ }
+
+ rule add-defaults ( )
+ {
+ if ! $(self.defaults)
+ {
+ self.defaults = [ property-set.create
+ [ feature.add-defaults $(self.raw) ] ] ;
+ }
+ return $(self.defaults) ;
+ }
+
+ rule as-path ( )
+ {
+ if ! $(self.as-path)
+ {
+ self.as-path = [ property.as-path $(self.base) ] ;
+ }
+ return $(self.as-path) ;
+ }
+
+ # Computes the path to be used for a target with the given properties.
+ # Returns a list of
+ # - the computed path
+ # - if the path is relative to the build directory, a value of 'true'.
+ #
+ rule target-path ( )
+ {
+ if ! $(self.target-path)
+ {
+ # The <location> feature can be used to explicitly change the
+ # location of generated targets.
+ local l = [ get <location> ] ;
+ if $(l)
+ {
+ self.target-path = $(l) ;
+ }
+ else
+ {
+ local p = [ as-path ] ;
+ p = [ property-set.hash-maybe $(p) ] ;
+
+ # A really ugly hack. The Boost regression test system requires
+ # specific target paths, and it seems that changing it to handle
+ # another directory layout is really hard. For that reason, we
+ # teach V2 to do the things the regression system requires. The
+ # value of '<location-prefix>' is prepended to the path.
+ local prefix = [ get <location-prefix> ] ;
+ if $(prefix)
+ {
+ self.target-path = [ path.join $(prefix) $(p) ] ;
+ }
+ else
+ {
+ self.target-path = $(p) ;
+ }
+ if ! $(self.target-path)
+ {
+ self.target-path = . ;
+ }
+ # The path is relative to build dir.
+ self.target-path += true ;
+ }
+ }
+ return $(self.target-path) ;
+ }
+
+ rule add ( ps )
+ {
+ if ! $(self.added.$(ps))
+ {
+ self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ;
+ }
+ return $(self.added.$(ps)) ;
+ }
+
+ rule add-raw ( properties * )
+ {
+ return [ add [ property-set.create $(properties) ] ] ;
+ }
+
+ rule link-incompatible-with ( ps )
+ {
+ if ! $(.li.$(ps))
+ {
+ local li1 = [ $(__name__).link-incompatible ] ;
+ local li2 = [ $(ps).link-incompatible ] ;
+ if [ set.equal $(li1) : $(li2) ]
+ {
+ .li.$(ps) = false ;
+ }
+ else
+ {
+ .li.$(ps) = true ;
+ }
+ }
+ if $(.li.$(ps)) = true
+ {
+ return true ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ # Returns all values of 'feature'.
+ #
+ rule get ( feature )
+ {
+ if ! $(self.map-built)
+ {
+ # For each feature, create a member var and assign all values to it.
+ # Since all regular member vars start with 'self', there will be no
+ # conflicts between names.
+ self.map-built = true ;
+ for local v in $(self.raw)
+ {
+ $(v:G) += $(v:G=) ;
+ }
+ }
+ return $($(feature)) ;
+ }
+}
+
+
+# Creates a new 'property-set' instance for the given raw properties or returns
+# an already existing one.
+#
+rule create ( raw-properties * )
+{
+ raw-properties = [ sequence.unique
+ [ sequence.insertion-sort $(raw-properties) ] ] ;
+
+ local key = $(raw-properties:J=-:E=) ;
+
+ if ! $(.ps.$(key))
+ {
+ .ps.$(key) = [ new property-set $(raw-properties) ] ;
+ }
+ return $(.ps.$(key)) ;
+}
+NATIVE_RULE property-set : create ;
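+# Because 'create' sorts and de-duplicates the raw properties and caches the
+# result under that key, equivalent inputs yield the same instance
+# (illustrative):
+#
+#   local p1 = [ property-set.create <include>a <define>B ] ;
+#   local p2 = [ property-set.create <define>B <include>a <include>a ] ;
+#
+# Here $(p1) and $(p2) refer to the same object.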
+
+
+# Creates a new 'property-set' instance after checking that all properties are
+# valid and converting incidental properties into gristed form.
+#
+rule create-with-validation ( raw-properties * )
+{
+ property.validate $(raw-properties) ;
+ return [ create [ property.make $(raw-properties) ] ] ;
+}
+
+
+# Creates a property-set from the input given by the user, in the context of
+# 'jamfile-module' at 'location'.
+#
+rule create-from-user-input ( raw-properties * : jamfile-module location )
+{
+ local specification = [ property.translate-paths $(raw-properties)
+ : $(location) ] ;
+ specification = [ property.translate-indirect $(specification)
+ : $(jamfile-module) ] ;
+ local project-id = [ project.attribute $(jamfile-module) id ] ;
+ project-id ?= [ path.root $(location) [ path.pwd ] ] ;
+ specification = [ property.translate-dependencies
+ $(specification) : $(project-id) : $(location) ] ;
+ specification =
+ [ property.expand-subfeatures-in-conditions $(specification) ] ;
+ specification = [ property.make $(specification) ] ;
+ return [ property-set.create $(specification) ] ;
+}
+
+
+# Refines requirements with requirements provided by the user. The
+# "-<property>value" syntax in the specification is handled specially: it
+# removes the given requirements.
+# - parent-requirements -- property-set object with requirements to refine.
+# - specification -- string list of requirements provided by the user.
+# - project-module -- module to which context indirect features will be
+# bound.
+# - location -- path to which path features are relative.
+#
+rule refine-from-user-input ( parent-requirements : specification * :
+ project-module : location )
+{
+ if ! $(specification)
+ {
+ return $(parent-requirements) ;
+ }
+ else
+ {
+ local add-requirements ;
+ local remove-requirements ;
+
+ for local r in $(specification)
+ {
+ local m = [ MATCH "^-(.*)" : $(r) ] ;
+ if $(m)
+ {
+ remove-requirements += $(m) ;
+ }
+ else
+ {
+ add-requirements += $(r) ;
+ }
+ }
+
+ if $(remove-requirements)
+ {
+ # Need to create a property set, so that path features and indirect
+ # features are translated just like they are in project
+ # requirements.
+ local ps = [ property-set.create-from-user-input
+ $(remove-requirements) : $(project-module) $(location) ] ;
+
+ parent-requirements = [ property-set.create
+ [ set.difference [ $(parent-requirements).raw ]
+ : [ $(ps).raw ] ] ] ;
+ specification = $(add-requirements) ;
+ }
+
+ local requirements = [ property-set.create-from-user-input
+ $(specification) : $(project-module) $(location) ] ;
+
+ return [ $(parent-requirements).refine $(requirements) ] ;
+ }
+}
+
+
+# Returns a property-set with an empty set of properties.
+#
+rule empty ( )
+{
+ if ! $(.empty)
+ {
+ .empty = [ create ] ;
+ }
+ return $(.empty) ;
+}
+
+if [ option.get hash : : yes ] = yes
+{
+ rule hash-maybe ( path ? )
+ {
+ path ?= "" ;
+ return [ MD5 $(path) ] ;
+ }
+}
+else
+{
+ rule hash-maybe ( path ? )
+ {
+ return $(path) ;
+ }
+}
+
diff --git a/jam-files/boost-build/build/property.jam b/jam-files/boost-build/build/property.jam
new file mode 100644
index 000000000..a2ad5226b
--- /dev/null
+++ b/jam-files/boost-build/build/property.jam
@@ -0,0 +1,788 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import feature ;
+import indirect ;
+import path ;
+import regex ;
+import string ;
+import sequence ;
+import set ;
+import utility ;
+
+
+# Refines 'properties' by overriding any non-free and non-conditional properties
+# for which a different value is specified in 'requirements'. Returns the
+# resulting list of properties.
+#
+rule refine ( properties * : requirements * )
+{
+ local result ;
+ local error ;
+
+ # All the 'requirements' elements should be present in the result. Record
+ # them so that we can handle 'properties'.
+ for local r in $(requirements)
+ {
+ # Do not consider conditional requirements.
+ if ! [ MATCH (:) : $(r:G=) ]
+ {
+ # Note: cannot use a local variable here, so use an ugly name.
+ __require__$(r:G) = $(r:G=) ;
+ }
+ }
+
+ for local p in $(properties)
+ {
+ if [ MATCH (:) : $(p:G=) ]
+ {
+ # Do not modify conditional properties.
+ result += $(p) ;
+ }
+ else if free in [ feature.attributes $(p:G) ]
+ {
+ # Do not modify free properties.
+ result += $(p) ;
+ }
+ else
+ {
+ local required-value = $(__require__$(p:G)) ;
+ if $(required-value)
+ {
+ if $(p:G=) != $(required-value)
+ {
+ result += $(p:G)$(required-value) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ }
+
+ # Unset our ugly map.
+ for local r in $(requirements)
+ {
+ __require__$(r:G) = ;
+ }
+
+ if $(error)
+ {
+ return $(error) ;
+ }
+ else
+ {
+ return [ sequence.unique $(result) $(requirements) ] ;
+ }
+}
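+# For example (illustrative values):
+#
+#   refine <optimization>off <define>MY_MACRO : <optimization>speed
+#
+# returns <optimization>speed <define>MY_MACRO -- the non-free <optimization>
+# value is overridden, while the free <define> property is kept.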
+
+
+# Removes all conditional properties whose conditions are not met. For those
+# with met conditions, removes the condition. Properties in conditions are
+# looked up in 'context'.
+#
+rule evaluate-conditionals-in-context ( properties * : context * )
+{
+ local base ;
+ local conditionals ;
+ for local p in $(properties)
+ {
+ if [ MATCH (:<) : $(p) ]
+ {
+ conditionals += $(p) ;
+ }
+ else
+ {
+ base += $(p) ;
+ }
+ }
+
+ local result = $(base) ;
+ for local p in $(conditionals)
+ {
+ # Separate condition and property.
+ local s = [ MATCH (.*):(<.*) : $(p) ] ;
+ # Split condition into individual properties.
+ local condition = [ regex.split $(s[1]) "," ] ;
+ # Evaluate condition.
+ if ! [ MATCH (!).* : $(condition:G=) ]
+ {
+ # Only positive checks
+ if $(condition) in $(context)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ else
+ {
+ # Have negative checks
+ local fail ;
+ while $(condition)
+ {
+ local c = $(condition[1]) ;
+ local m = [ MATCH !(.*) : $(c) ] ;
+ if $(m)
+ {
+ local p = $(m:G=$(c:G)) ;
+ if $(p) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ else
+ {
+ if ! $(c) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ condition = $(condition[2-]) ;
+ }
+ if ! $(fail)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ }
+ return $(result) ;
+}
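+# For example (illustrative): evaluating the conditional property
+#
+#   <toolset>gcc,<variant>debug:<define>DEBUG_TRACE
+#
+# in a context containing <toolset>gcc <variant>debug yields <define>DEBUG_TRACE;
+# in any other context the property is dropped.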
+
+
+rule expand-subfeatures-in-conditions ( properties * )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local s = [ MATCH (.*):(<.*) : $(p) ] ;
+ if ! $(s)
+ {
+ result += $(p) ;
+ }
+ else
+ {
+ local condition = $(s[1]) ;
+ local value = $(s[2]) ;
+ # Condition might include several elements.
+ condition = [ regex.split $(condition) "," ] ;
+ local e ;
+ for local c in $(condition)
+ {
+ # It is common for a condition to include a toolset or
+ # subfeatures that have not been defined. In that case we want
+ # the condition to simply never be satisfied; validation would
+ # only produce a spurious error, so we prevent it by passing
+ # 'true' as the second parameter.
+ e += [ feature.expand-subfeatures $(c) : true ] ;
+ }
+ if $(e) = $(condition)
+ {
+                # TODO: This is just an optimization and possibly a premature
+                # one at that. (12.07.2008.) (Jurko)
+ result += $(p) ;
+ }
+ else
+ {
+ result += $(e:J=,):$(value) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for as-path, below. Orders properties with the implicit ones first, and
+# within the two sections in alphabetical order of feature name.
+#
+local rule path-order ( x y )
+{
+ if $(y:G) && ! $(x:G)
+ {
+ return true ;
+ }
+ else if $(x:G) && ! $(y:G)
+ {
+ return ;
+ }
+ else
+ {
+ if ! $(x:G)
+ {
+ x = [ feature.expand-subfeatures $(x) ] ;
+ y = [ feature.expand-subfeatures $(y) ] ;
+ }
+
+ if $(x[1]) < $(y[1])
+ {
+ return true ;
+ }
+ }
+}
+
+
+local rule abbreviate-dashed ( string )
+{
+ local r ;
+ for local part in [ regex.split $(string) - ]
+ {
+ r += [ string.abbreviate $(part) ] ;
+ }
+ return $(r:J=-) ;
+}
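+
+# Used only when --abbreviate-paths is given; for example (see the __test__
+# rule below):
+#
+#   [ abbreviate-dashed optimization-off ]     # -> optmz-off
+#   [ abbreviate-dashed runtime-link-static ]  # -> rntm-lnk-sttc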
+
+
+local rule identity ( string )
+{
+ return $(string) ;
+}
+
+
+if --abbreviate-paths in [ modules.peek : ARGV ]
+{
+ .abbrev = abbreviate-dashed ;
+}
+else
+{
+ .abbrev = identity ;
+}
+
+
+# Returns a path representing the given expanded property set.
+#
+rule as-path ( properties * )
+{
+ local entry = .result.$(properties:J=-) ;
+
+ if ! $($(entry))
+ {
+ # Trim redundancy.
+ properties = [ feature.minimize $(properties) ] ;
+
+ # Sort according to path-order.
+ properties = [ sequence.insertion-sort $(properties) : path-order ] ;
+
+ local components ;
+ for local p in $(properties)
+ {
+ if $(p:G)
+ {
+ local f = [ utility.ungrist $(p:G) ] ;
+ p = $(f)-$(p:G=) ;
+ }
+ components += [ $(.abbrev) $(p) ] ;
+ }
+
+ $(entry) = $(components:J=/) ;
+ }
+
+ return $($(entry)) ;
+}
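+
+# Illustrative result (a sketch mirroring the __test__ rule below, given the
+# feature definitions used there): implicit values such as 'gcc' and 'debug'
+# appear without a feature-name prefix and come first, explicit values are
+# prefixed with their feature name, and <optimization>off is trimmed by
+# feature.minimize because it is implied by <variant>debug:
+#
+#   [ property.as-path <toolset>gcc <optimization>off <rtti>off <variant>debug ]
+#   # -> "gcc/debug/rtti-off"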
+
+
+# Exit with error if property is not valid.
+#
+local rule validate1 ( property )
+{
+ local msg ;
+ if $(property:G)
+ {
+ local feature = $(property:G) ;
+ local value = $(property:G=) ;
+
+ if ! [ feature.valid $(feature) ]
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "unknown feature '$(feature)'" ;
+ }
+ else if $(value) && ! free in [ feature.attributes $(feature) ]
+ {
+ feature.validate-value-string $(feature) $(value) ;
+ }
+ else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "No value specified for feature '$(feature)'" ;
+ }
+ }
+ else
+ {
+ local feature = [ feature.implied-feature $(property) ] ;
+ feature.validate-value-string $(feature) $(property) ;
+ }
+ if $(msg)
+ {
+ errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
+ }
+}
+
+
+rule validate ( properties * )
+{
+ for local p in $(properties)
+ {
+ validate1 $(p) ;
+ }
+}
+
+
+rule validate-property-sets ( property-sets * )
+{
+ for local s in $(property-sets)
+ {
+ validate [ feature.split $(s) ] ;
+ }
+}
+
+
+# Expands any implicit property values in the given property 'specification' so
+# they explicitly state their feature.
+#
+rule make ( specification * )
+{
+ local result ;
+ for local e in $(specification)
+ {
+ if $(e:G)
+ {
+ result += $(e) ;
+ }
+ else if [ feature.is-implicit-value $(e) ]
+ {
+ local feature = [ feature.implied-feature $(e) ] ;
+ result += $(feature)$(e) ;
+ }
+ else
+ {
+ errors.error "'$(e)' is not a valid property specification" ;
+ }
+ }
+ return $(result) ;
+}
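+
+# Illustrative sketch (assuming 'gcc' has been registered as an implicit value
+# of the <toolset> feature, as in the __test__ rule below): an unadorned
+# implicit value is given its implied feature, while gristed properties pass
+# through unchanged:
+#
+#   [ property.make <define>FOO gcc ]   # -> <define>FOO <toolset>gcc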
+
+
+# Returns a property set containing all the elements in 'properties' that do not
+# have their attributes listed in 'attributes'.
+#
+rule remove ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that have
+# their attributes listed in 'attributes'.
+#
+rule take ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Selects properties corresponding to any of the given features.
+#
+rule select ( features * : properties * )
+{
+ local result ;
+
+ # Add any missing angle brackets.
+ local empty = "" ;
+ features = $(empty:G=$(features)) ;
+
+ for local p in $(properties)
+ {
+ if $(p:G) in $(features)
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a modified version of properties with all values of the given feature
+# replaced by the given value. If 'value' is empty the feature will be removed.
+#
+rule change ( properties * : feature value ? )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ result += $(value:G=$(feature)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'property' is a conditional property, returns the condition and the
+# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
+# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
+# string.
+#
+rule split-conditional ( property )
+{
+ local m = [ MATCH "(.+):<(.+)" : $(property) ] ;
+ if $(m)
+ {
+ return $(m[1]) <$(m[2]) ;
+ }
+}
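+
+# For example (see the __test__ rule below):
+#
+#   [ split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ]
+#   # -> <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+#
+#   [ split-conditional <toolset>gcc ]   # -> (empty: not a conditional)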
+
+
+# Interpret all path properties in 'properties' as relative to 'path'. The
+# property values are assumed to be in system-specific form, and will be
+# translated into normalized form.
+#
+rule translate-paths ( properties * : path )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = $(split[1]): ;
+ p = $(split[2]) ;
+ }
+
+ if path in [ feature.attributes $(p:G) ]
+ {
+ local values = [ regex.split $(p:TG=) "&&" ] ;
+ local t ;
+ for local v in $(values)
+ {
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
+ }
+ t = $(t:J="&&") ;
+ result += $(condition)$(t:TG=$(p:G)) ;
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Assumes that all feature values that start with '@' are names of rules, used
+# in 'context-module'. Such rules can be either local to the module or global.
+# Converts such values into 'indirect-rule' format (see indirect.jam), so they
+# can be called from other modules. Does nothing for such values that are
+# already in the 'indirect-rule' format.
+#
+rule translate-indirect ( specification * : context-module )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local m = [ MATCH ^@(.+) : $(p:G=) ] ;
+ if $(m)
+ {
+ local v ;
+ if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
+ {
+ # Rule is already in the 'indirect-rule' format.
+ v = $(m) ;
+ }
+ else
+ {
+ if ! [ MATCH ".*([.]).*" : $(m) ]
+ {
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
+ m = $(context-module).$(m) ;
+ }
+ v = [ indirect.make $(m) : $(context-module) ] ;
+ }
+
+ v = @$(v) ;
+ result += $(v:G=$(p:G)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Binds all dependency properties in a list relative to the given project.
+# Targets with absolute paths will be left unchanged and targets which have a
+# project specified will have the path to the project interpreted relative to
+# the specified location.
+#
+rule translate-dependencies ( specification * : project-id : location )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = $(split[1]): ;
+ p = $(split[2]) ;
+ }
+ if dependency in [ feature.attributes $(p:G) ]
+ {
+ local split-target = [ regex.match (.*)//(.*) : $(p:G=) ] ;
+ if $(split-target)
+ {
+ local rooted = [ path.root [ path.make $(split-target[1]) ]
+ [ path.root $(location) [ path.pwd ] ] ] ;
+ result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
+ }
+ else if [ path.is-rooted $(p:G=) ]
+ {
+ result += $(condition)$(p) ;
+ }
+ else
+ {
+ result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
+ }
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Class maintaining a property set -> string mapping.
+#
+class property-map
+{
+ import errors ;
+ import numbers ;
+ import sequence ;
+
+ rule __init__ ( )
+ {
+ self.next-flag = 1 ;
+ }
+
+ # Associate 'value' with 'properties'.
+ #
+ rule insert ( properties + : value )
+ {
+ self.all-flags += $(self.next-flag) ;
+ self.properties.$(self.next-flag) = $(properties) ;
+ self.value.$(self.next-flag) = $(value) ;
+
+ self.next-flag = [ numbers.increment $(self.next-flag) ] ;
+ }
+
+ # Returns the value associated with 'properties' or any subset of it. If
+ # more than one subset has a value assigned to it, returns the value for the
+ # longest subset, if it is unique.
+ #
+ rule find ( properties + )
+ {
+ return [ find-replace $(properties) ] ;
+ }
+
+ # Returns the value associated with 'properties'. If 'value' parameter is
+ # given, replaces the found value.
+ #
+ rule find-replace ( properties + : value ? )
+ {
+ # First find all matches.
+ local matches ;
+ local match-ranks ;
+ for local i in $(self.all-flags)
+ {
+ if $(self.properties.$(i)) in $(properties)
+ {
+ matches += $(i) ;
+ match-ranks += [ sequence.length $(self.properties.$(i)) ] ;
+ }
+ }
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
+ if $(best[2])
+ {
+ errors.error "Ambiguous key $(properties:J= :E=)" ;
+ }
+ local original = $(self.value.$(best)) ;
+ if $(value)
+ {
+ self.value.$(best) = $(value) ;
+ }
+ return $(original) ;
+ }
+}
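+
+# Illustrative usage of property-map (a sketch mirroring the __test__ rule
+# below, which obtains 'new' from the "class" module): the value associated
+# with the longest matching property subset wins, and ambiguous lookups are
+# reported as errors.
+#
+#   local pm = [ new property-map ] ;
+#   $(pm).insert <toolset>gcc : o ;
+#   $(pm).insert <toolset>gcc <os>NT : obj ;
+#   [ $(pm).find <toolset>gcc ]          # -> o
+#   [ $(pm).find <toolset>gcc <os>NT ]   # -> obj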
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+ import feature ;
+
+ # Local rules must be explicitly re-imported.
+ import property : path-order abbreviate-dashed ;
+
+ feature.prepare-test property-test-temp ;
+
+ feature.feature toolset : gcc : implicit symmetric ;
+ feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
+ 3.0.2 : optional ;
+ feature.feature define : : free ;
+ feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
+ feature.feature optimization : on off ;
+ feature.feature variant : debug release : implicit composite symmetric ;
+ feature.feature rtti : on off : link-incompatible ;
+
+ feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
+ feature.compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+
+ assert.true path-order $(test-space) debug <define>foo ;
+ assert.false path-order $(test-space) <define>foo debug ;
+ assert.true path-order $(test-space) gcc debug ;
+ assert.false path-order $(test-space) debug gcc ;
+ assert.true path-order $(test-space) <optimization>on <rtti>on ;
+ assert.false path-order $(test-space) <rtti>on <optimization>on ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
+ : refine <toolset>gcc <rtti>off
+ : <define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <optimization>on
+ : refine <toolset>gcc <optimization>off
+ : <optimization>on
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off
+ : refine <toolset>gcc : <rtti>off : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
+ : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+ : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+ : $(test-space) ;
+
+ assert.result <define>MY_RELEASE
+ : evaluate-conditionals-in-context
+ <variant>release,<rtti>off:<define>MY_RELEASE
+ : <toolset>gcc <variant>release <rtti>off ;
+
+ assert.result debug
+ : as-path <optimization>off <variant>debug
+ : $(test-space) ;
+
+ assert.result gcc/debug/rtti-off
+ : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
+ : $(test-space) ;
+
+ assert.result optmz-off : abbreviate-dashed optimization-off ;
+ assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
+
+ try ;
+ validate <feature>value : $(test-space) ;
+ catch "Invalid property '<feature>value': unknown feature 'feature'." ;
+
+ try ;
+ validate <rtti>default : $(test-space) ;
+ catch \"default\" is not a known value of feature <rtti> ;
+
+ validate <define>WHATEVER : $(test-space) ;
+
+ try ;
+ validate <rtti> : $(test-space) ;
+ catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
+
+ try ;
+ validate value : $(test-space) ;
+ catch "value" is not a value of an implicit feature ;
+
+ assert.result-set-equal <rtti>on
+ : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
+
+ assert.result-set-equal <include>a
+ : select include : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a
+ : select include bar : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a <toolset>gcc
+ : select include <bar> <toolset> : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <toolset>kylix <include>a
+ : change <toolset>gcc <include>a : <toolset> kylix ;
+
+ pm = [ new property-map ] ;
+ $(pm).insert <toolset>gcc : o ;
+ $(pm).insert <toolset>gcc <os>NT : obj ;
+ $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
+
+ assert.equal o : [ $(pm).find <toolset>gcc ] ;
+
+ assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ;
+
+ try ;
+ $(pm).find <toolset>gcc <os>NT <os>CYGWIN ;
+ catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
+
+ # Test ordinary properties.
+ assert.result : split-conditional <toolset>gcc ;
+
+ # Test properties with ":".
+ assert.result : split-conditional <define>FOO=A::B ;
+
+ # Test conditional feature.
+ assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+ : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
+
+ feature.finish-test property-test-temp ;
+}
diff --git a/jam-files/boost-build/build/readme.txt b/jam-files/boost-build/build/readme.txt
new file mode 100644
index 000000000..c3dddd8d7
--- /dev/null
+++ b/jam-files/boost-build/build/readme.txt
@@ -0,0 +1,13 @@
+Copyright 2001, 2002 Dave Abrahams
+Copyright 2002 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+Development code for new build system. To run unit tests for jam code, execute:
+
+ bjam --debug --build-system=test
+
+Comprehensive tests require Python. See ../test/readme.txt
+
+
+
diff --git a/jam-files/boost-build/build/scanner.jam b/jam-files/boost-build/build/scanner.jam
new file mode 100644
index 000000000..d6042ea2c
--- /dev/null
+++ b/jam-files/boost-build/build/scanner.jam
@@ -0,0 +1,153 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements scanners: objects that compute implicit dependencies for
+# files, such as includes in C++.
+#
+# Scanner has a regular expression used to find dependencies, some
+# data needed to interpret those dependencies (for example, include
+# paths), and code which actually establishes the needed relationships
+# between actual jam targets.
+#
+# Scanner objects are created by actions when they try to actualize
+# virtual targets, are passed to the 'virtual-target.actualize' method
+# and are then associated with actual targets. It is possible to use
+# several scanners for a virtual-target. For example, a single source
+# might be used by two compile actions with different include paths.
+# In this case, two different actual targets will be created, each
+# having a scanner of its own.
+#
+# Typically, scanners are created from the target type and the action's
+# properties, using the rule 'get' in this module. Directly creating
+# scanners is not recommended, because it might create many equivalent
+# but different instances, and lead to unneeded duplication of
+# actual targets. However, actions can also create scanners in a special
+# way, instead of relying on just the target type.
+
+import "class" : new ;
+import property virtual-target property-set ;
+import errors : error ;
+
+# Base scanner class.
+class scanner
+{
+ rule __init__ ( )
+ {
+ }
+
+ # Returns a pattern to use for scanning
+ rule pattern ( )
+ {
+        error "method must be overridden" ;
+ }
+
+    # Establishes the necessary relationships between targets, given the
+    # actual target being scanned and a list of pattern matches in that
+    # file.
+ rule process ( target : matches * )
+ {
+        error "method must be overridden" ;
+ }
+}
+
+# Registers a new scanner class, specifying a set of
+# properties relevant to that scanner. The constructor of the class
+# should have one parameter: a list of properties.
+rule register ( scanner-class : relevant-properties * )
+{
+ .registered += $(scanner-class) ;
+ .relevant-properties.$(scanner-class) = $(relevant-properties) ;
+}
+
+# Common scanner class, which can be used when there's only one
+# kind of includes (unlike C, where "" and <> includes have different
+# search paths).
+class common-scanner : scanner
+{
+ import scanner ;
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+ ISFILE $(matches) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
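+
+# Illustrative sketch of how a concrete scanner is typically defined and
+# registered (the class name and the regular expression here are illustrative
+# only; real scanners live in the type modules):
+#
+#   class my-include-scanner : common-scanner
+#   {
+#       rule pattern ( )
+#       {
+#           return "^[ \t]*include[ ]*\"([^\"]*)\"" ;
+#       }
+#   }
+#   scanner.register my-include-scanner : include ;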
+
+
+# Returns an instance of previously registered scanner,
+# with the specified properties.
+rule get ( scanner-class : property-set )
+{
+ if ! $(scanner-class) in $(.registered)
+ {
+        error "attempt to get unregistered scanner" ;
+ }
+
+ local r = $(.rv-cache.$(property-set)) ;
+ if ! $(r)
+ {
+ r = [ property-set.create
+ [ property.select $(.relevant-properties.$(scanner-class)) :
+ [ $(property-set).raw ] ] ] ;
+ .rv-cache.$(property-set) = $(r) ;
+ }
+
+ if ! $(scanner.$(scanner-class).$(r:J=-))
+ {
+ scanner.$(scanner-class).$(r:J=-) = [ new $(scanner-class) [ $(r).raw ] ] ;
+ }
+ return $(scanner.$(scanner-class).$(r:J=-)) ;
+}
+
+
+# Installs the specified scanner on actual target 'target'.
+rule install ( scanner : target
+ vtarget # virtual target from which 'target' was actualized
+)
+{
+ HDRSCAN on $(target) = [ $(scanner).pattern ] ;
+ SCANNER on $(target) = $(scanner) ;
+ HDRRULE on $(target) = scanner.hdrrule ;
+
+    # The scanner reflects differences in properties affecting the binding of
+    # 'target'. It will be known when processing includes for the target and
+    # gives information on how to interpret quoted includes.
+ HDRGRIST on $(target) = $(scanner) ;
+}
+
+# Propagate scanner setting from 'including-target' to 'targets'.
+rule propagate ( scanner : targets * : including-target )
+{
+ HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
+ SCANNER on $(targets) = $(scanner) ;
+ HDRRULE on $(targets) = scanner.hdrrule ;
+ HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
+}
+
+
+rule hdrrule ( target : matches * : binding )
+{
+ local scanner = [ on $(target) return $(SCANNER) ] ;
+ $(scanner).process $(target) : $(matches) : $(binding) ;
+}
+# hdrrule must be available at global scope so that it can be invoked
+# by header scanning
+IMPORT scanner : hdrrule : : scanner.hdrrule ;
+
+
+
+
diff --git a/jam-files/boost-build/build/targets.jam b/jam-files/boost-build/build/targets.jam
new file mode 100644
index 000000000..a70532ce7
--- /dev/null
+++ b/jam-files/boost-build/build/targets.jam
@@ -0,0 +1,1659 @@
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports 'abstract' targets, which are targets explicitly defined in a
+# Jamfile.
+#
+# Abstract targets are represented by classes derived from 'abstract-target'
+# class. The first abstract target is 'project-target', which is created for
+# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module
+# (see project.jam).
+#
+# Project targets keep a list of 'main-target' instances. A main target is what
+# the user explicitly defines in a Jamfile. It is possible to have several
+# definitions for a main target, for example to have different lists of sources
+# for different platforms. So, main targets keep a list of alternatives.
+#
+# Each alternative is an instance of 'abstract-target'. When a main target
+# subvariant is defined by some rule, that rule will decide what class to use,
+# create an instance of that class and add it to the list of alternatives for
+# the main target.
+#
+# Rules supplied by the build system will use only targets derived from
+# 'basic-target' class, which will provide some default behaviour. There will be
+# different classes derived from it such as 'make-target', created by the 'make'
+# rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
+
+#
+# +------------------------+
+# |abstract-target |
+# +========================+
+# |name |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project-target | | main-target | | basic-target |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# | main-target | +-------------+ | construct = 0|
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
+# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
+# | | typed-target | | make-target | | stage-target |
+# . +==============+ +=============+ +==============+
+# . | construct | | construct | | construct |
+# +--------------+ +-------------+ +--------------+
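+
+# For example, a Jamfile may declare a single main target with two
+# alternatives, each alternative being an instance of a 'basic-target' derived
+# class (sketch only; see also select-alternatives below):
+#
+#   lib l : l.cpp : <variant>debug ;
+#   lib l : l_opt.cpp : <variant>release ;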
+
+import assert ;
+import "class" : new ;
+import errors ;
+import feature ;
+import indirect ;
+import path ;
+import property ;
+import property-set ;
+import sequence ;
+import set ;
+import toolset ;
+import build-request ;
+
+
+# Base class for all abstract targets.
+#
+class abstract-target
+{
+ import project ;
+ import assert ;
+ import "class" ;
+ import errors ;
+
+ rule __init__ ( name # Name of the target in Jamfile.
+ : project-target # The project target to which this one belongs.
+ )
+ {
+ # Note: it might seem that we don't need either name or project at all.
+ # However, there are places where we really need it. One example is
+ # error messages which should name problematic targets. Another is
+ # setting correct paths for sources and generated files.
+
+ self.name = $(name) ;
+ self.project = $(project-target) ;
+ self.location = [ errors.nearest-user-location ] ;
+ }
+
+ # Returns the name of this target.
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Returns the project for this target.
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Return the location where the target was declared.
+ rule location ( )
+ {
+ return $(self.location) ;
+ }
+
+ # Returns a user-readable name for this target.
+ rule full-name ( )
+ {
+ local location = [ $(self.project).get location ] ;
+ return $(location)/$(self.name) ;
+ }
+
+ # Generates virtual targets for this abstract target using the specified
+ # properties, unless a different value of some feature is required by the
+ # target.
+ # On success, returns:
+ # - a property-set with the usage requirements to be applied to dependants
+ # - a list of produced virtual targets, which may be empty.
+ # If 'property-set' is empty, performs the default build of this target, in
+ # a way specific to the derived class.
+ #
+ rule generate ( property-set )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ rule rename ( new-name )
+ {
+ self.name = $(new-name) ;
+ }
+}
+
+
+if --debug-building in [ modules.peek : ARGV ]
+{
+ modules.poke : .debug-building : true ;
+}
+
+
+rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Project target class (derived from 'abstract-target').
+#
+# This class has the following responsibilities:
+# - Maintaining a list of main targets in this project and building them.
+#
+# Main targets are constructed in two stages:
+# - When Jamfile is read, a number of calls to 'add-alternative' is made. At
+# that time, alternatives can also be renamed to account for inline targets.
+# - The first time 'main-target' or 'has-main-target' rule is called, all
+# alternatives are enumerated and main targets are created.
+#
+class project-target : abstract-target
+{
+ import project ;
+ import targets ;
+ import path ;
+ import print ;
+ import property-set ;
+ import set ;
+ import sequence ;
+ import "class" : new ;
+ import errors ;
+
+ rule __init__ ( name : project-module parent-project ?
+ : requirements * : default-build * )
+ {
+ abstract-target.__init__ $(name) : $(__name__) ;
+
+ self.project-module = $(project-module) ;
+ self.location = [ project.attribute $(project-module) location ] ;
+ self.requirements = $(requirements) ;
+ self.default-build = $(default-build) ;
+
+ if $(parent-project)
+ {
+ inherit $(parent-project) ;
+ }
+ }
+
+ # This is needed only by the 'make' rule. Need to find the way to make
+ # 'make' work without this method.
+ #
+ rule project-module ( )
+ {
+ return $(self.project-module) ;
+ }
+
+ rule get ( attribute )
+ {
+ return [ project.attribute $(self.project-module) $(attribute) ] ;
+ }
+
+ rule build-dir ( )
+ {
+ if ! $(self.build-dir)
+ {
+ self.build-dir = [ get build-dir ] ;
+ if ! $(self.build-dir)
+ {
+ self.build-dir = [ path.join [ $(self.project).get location ]
+ bin ] ;
+ }
+ }
+ return $(self.build-dir) ;
+ }
+
+ # Generates all possible targets contained in this project.
+ #
+ rule generate ( property-set * )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "building project" [ name ] " ('$(__name__)') with" [ $(property-set).raw ] ;
+ targets.increase-indent ;
+ }
+
+ local usage-requirements = [ property-set.empty ] ;
+ local targets ;
+
+ for local t in [ targets-to-build ]
+ {
+ local g = [ $(t).generate $(property-set) ] ;
+ usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
+ targets += $(g[2-]) ;
+ }
+ targets.decrease-indent ;
+ return $(usage-requirements) [ sequence.unique $(targets) ] ;
+ }
+
+ # Computes and returns a list of abstract-target instances which must be
+ # built when this project is built.
+ #
+ rule targets-to-build ( )
+ {
+ local result ;
+
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ # Collect all main targets here, except for "explicit" ones.
+ for local t in $(self.main-targets)
+ {
+ if ! [ $(t).name ] in $(self.explicit-targets)
+ {
+ result += $(t) ;
+ }
+ }
+
+ # Collect all projects referenced via "projects-to-build" attribute.
+ local self-location = [ get location ] ;
+ for local pn in [ get projects-to-build ]
+ {
+ result += [ find $(pn)/ ] ;
+ }
+
+ return $(result) ;
+ }
+
+    # Add 'target' to the list of targets in this project that should be built
+    # only by explicit request.
+ #
+ rule mark-target-as-explicit ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.explicit-targets += $(target-name) ;
+ }
+
+ rule mark-target-as-always ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.always-targets += $(target-name) ;
+ }
+
+ # Add new target alternative
+ #
+ rule add-alternative ( target-instance )
+ {
+ if $(self.built-main-targets)
+ {
+ errors.error add-alternative called when main targets are already
+ created. : in project [ full-name ] ;
+ }
+ self.alternatives += $(target-instance) ;
+ }
+
+ # Returns a 'main-target' class instance corresponding to 'name'.
+ #
+ rule main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+ return $(self.main-target.$(name)) ;
+ }
+
+ # Returns whether a main target with the specified name exists.
+ #
+ rule has-main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ if $(self.main-target.$(name))
+ {
+ return true ;
+ }
+ }
+
+    # Worker function for the find rule; it does no caching and simply returns
+    # nothing in case the target cannot be found.
+ #
+ rule find-really ( id )
+ {
+ local result ;
+ local current-location = [ get location ] ;
+
+ local split = [ MATCH (.*)//(.*) : $(id) ] ;
+ local project-part = $(split[1]) ;
+ local target-part = $(split[2]) ;
+
+ local extra-error-message ;
+ if $(project-part)
+ {
+ # There is an explicitly specified project part in id. Looks up the
+ # project and passes the request to it.
+ local pm = [ project.find $(project-part) : $(current-location) ] ;
+ if $(pm)
+ {
+ project-target = [ project.target $(pm) ] ;
+ result = [ $(project-target).find $(target-part) : no-error ] ;
+ }
+ else
+ {
+ # TODO: This extra error message will not get displayed most
+ # likely due to some buggy refactoring. Refactor the code so the
+                # message gets displayed again.
+ extra-error-message = error: could not find project
+ '$(project-part)' ;
+ }
+ }
+ else
+ {
+ # Interpret target-name as name of main target. Need to do this
+ # before checking for file. Consider the following scenario with a
+ # toolset not modifying its executable's names, e.g. gcc on
+ # Unix-like platforms:
+ #
+ # exe test : test.cpp ;
+ # install s : test : <location>. ;
+ #
+ # After the first build we would have a target named 'test' in the
+ # Jamfile and a file named 'test' on the disk. We need the target to
+ # override the file.
+ result = [ main-target $(id) ] ;
+
+ # Interpret id as an existing file reference.
+ if ! $(result)
+ {
+ result = [ new file-reference [ path.make $(id) ] :
+ $(self.project) ] ;
+ if ! [ $(result).exists ]
+ {
+ result = ;
+ }
+ }
+
+ # Interpret id as project-id.
+ if ! $(result)
+ {
+ local project-module = [ project.find $(id) :
+ $(current-location) ] ;
+ if $(project-module)
+ {
+ result = [ project.target $(project-module) ] ;
+ }
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # Find and return the target with the specified id, treated relative to
+ # self. Id may specify either a target or a file name with the target taking
+ # priority. May report an error or return nothing if the target is not found
+ # depending on the 'no-error' parameter.
+ #
+ rule find ( id : no-error ? )
+ {
+ local v = $(.id.$(id)) ;
+ if ! $(v)
+ {
+ v = [ find-really $(id) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ .id.$(id) = $(v) ;
+ }
+
+ if $(v) != none
+ {
+ return $(v) ;
+ }
+ else
+ {
+ if ! $(no-error)
+ {
+ local current-location = [ get location ] ;
+ ECHO "error: Unable to find file or target named" ;
+ ECHO "error: '$(id)'" ;
+ ECHO "error: referred from project at" ;
+ ECHO "error: '$(current-location)'" ;
+ ECHO $(extra-error-message) ;
+ EXIT ;
+ }
+ }
+ }
+
+ rule build-main-targets ( )
+ {
+ self.built-main-targets = true ;
+ for local a in $(self.alternatives)
+ {
+ local name = [ $(a).name ] ;
+ local target = $(self.main-target.$(name)) ;
+ if ! $(target)
+ {
+ local t = [ new main-target $(name) : $(self.project) ] ;
+ self.main-target.$(name) = $(t) ;
+ self.main-targets += $(t) ;
+ target = $(self.main-target.$(name)) ;
+ }
+
+ if $(name) in $(self.always-targets)
+ {
+ $(a).always ;
+ }
+
+ $(target).add-alternative $(a) ;
+ }
+ }
+
+ # Accessor, add a constant.
+ #
+ rule add-constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ : type ? # Optional type of value.
+ )
+ {
+ switch $(type)
+ {
+ case path :
+ local r ;
+ for local v in $(value)
+ {
+ local l = $(self.location) ;
+ if ! $(l)
+ {
+                    # Projects corresponding to config files do not have a
+                    # 'location' attribute, but do have a source location.
+ # It might be more reasonable to make every project have
+ # a location and use some other approach to prevent buildable
+ # targets in config files, but that's for later.
+ l = [ get source-location ] ;
+ }
+ v = [ path.root [ path.make $(v) ] $(l) ] ;
+ # Now make the value absolute path.
+ v = [ path.root $(v) [ path.pwd ] ] ;
+ # Constants should be in platform-native form.
+ v = [ path.native $(v) ] ;
+ r += $(v) ;
+ }
+ value = $(r) ;
+ }
+ if ! $(name) in $(self.constants)
+ {
+ self.constants += $(name) ;
+ }
+ self.constant.$(name) = $(value) ;
+ # Inject the constant in the scope of the Jamroot module.
+ modules.poke $(self.project-module) : $(name) : $(value) ;
+ }
+
+ rule inherit ( parent )
+ {
+ for local c in [ modules.peek $(parent) : self.constants ]
+ {
+ # No need to pass the type. Path constants were converted to
+ # absolute paths already by parent.
+ add-constant $(c)
+ : [ modules.peek $(parent) : self.constant.$(c) ] ;
+ }
+
+ # Import rules from parent.
+ local this-module = [ project-module ] ;
+ local parent-module = [ $(parent).project-module ] ;
+ # Do not import rules coming from 'project-rules' as they must be
+ # imported localized.
+ local user-rules = [ set.difference
+ [ RULENAMES $(parent-module) ] :
+ [ RULENAMES project-rules ] ] ;
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+}
+
+
+# Helper rules to detect cycles in main target references.
+#
+local rule start-building ( main-target-instance )
+{
+ if $(main-target-instance) in $(.targets-being-built)
+ {
+ local names ;
+ for local t in $(.targets-being-built) $(main-target-instance)
+ {
+ names += [ $(t).full-name ] ;
+ }
+
+ errors.error "Recursion in main target references"
+            : "the following targets are being built currently:"
+ : $(names) ;
+ }
+ .targets-being-built += $(main-target-instance) ;
+}
+
+
+local rule end-building ( main-target-instance )
+{
+ .targets-being-built = $(.targets-being-built[1--2]) ;
+}
+
+
+# A named top-level target in Jamfile.
+#
+class main-target : abstract-target
+{
+ import assert ;
+ import errors ;
+ import feature ;
+ import print ;
+ import property-set ;
+ import sequence ;
+ import targets : start-building end-building ;
+
+ rule __init__ ( name : project )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+ }
+
+ # Add a new alternative for this target
+ rule add-alternative ( target )
+ {
+ local d = [ $(target).default-build ] ;
+ if $(self.alternatives) && ( $(self.default-build) != $(d) )
+ {
+ errors.error "default build must be identical in all alternatives"
+ : "main target is" [ full-name ]
+ : "with" [ $(d).raw ]
+ : "differing from previous default build" [ $(self.default-build).raw ] ;
+ }
+ else
+ {
+ self.default-build = $(d) ;
+ }
+ self.alternatives += $(target) ;
+ }
+
+ # Returns the best viable alternative for this property-set. See the
+ # documentation for selection rules.
+ #
+ local rule select-alternatives ( property-set debug ? )
+ {
+ # When selecting alternatives we have to consider defaults, for example:
+ # lib l : l.cpp : <variant>debug ;
+ # lib l : l_opt.cpp : <variant>release ;
+ # won't work unless we add default value <variant>debug.
+        property-set = [ $(property-set).add-defaults ] ;
+
+ # The algorithm: we keep the current best viable alternative. When we've
+ # got a new best viable alternative, we compare it with the current one.
+
+ local best ;
+ local best-properties ;
+
+ if $(self.alternatives[2-])
+ {
+ local bad ;
+ local worklist = $(self.alternatives) ;
+ while $(worklist) && ! $(bad)
+ {
+ local v = $(worklist[1]) ;
+ local properties = [ $(v).match $(property-set) $(debug) ] ;
+
+ if $(properties) != no-match
+ {
+ if ! $(best)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ if $(properties) = $(best-properties)
+ {
+ bad = true ;
+ }
+ else if $(properties) in $(best-properties)
+ {
+ # Do nothing, this alternative is worse
+ }
+ else if $(best-properties) in $(properties)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ bad = true ;
+ }
+ }
+ }
+ worklist = $(worklist[2-]) ;
+ }
+ if ! $(bad)
+ {
+ return $(best) ;
+ }
+ }
+ else
+ {
+ return $(self.alternatives) ;
+ }
+ }
+
+ rule apply-default-build ( property-set )
+ {
+ return [ targets.apply-default-build $(property-set)
+ : $(self.default-build) ] ;
+ }
+
+    # Selects an alternative for this main target by finding all alternatives
+    # whose requirements are satisfied by 'property-set' and picking the one
+    # with the longest requirements set. Returns the result of calling
+    # 'generate' on that alternative.
+ #
+ rule generate ( property-set )
+ {
+ start-building $(__name__) ;
+
+        # We want composite properties in the build request to act as if all
+        # the properties they expand to were explicitly specified.
+ property-set = [ $(property-set).expand ] ;
+
+ local all-property-sets = [ apply-default-build $(property-set) ] ;
+ local usage-requirements = [ property-set.empty ] ;
+ local result ;
+ for local p in $(all-property-sets)
+ {
+ local r = [ generate-really $(p) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ }
+ }
+ end-building $(__name__) ;
+ return $(usage-requirements) [ sequence.unique $(result) ] ;
+ }
+
+    # Generates the main target with the given property set and returns a list
+    # whose first element is a property-set object containing the usage
+    # requirements of the generated target, followed by the generated virtual
+    # targets. It is possible that no targets are generated.
+ #
+ local rule generate-really ( property-set )
+ {
+ local best-alternatives = [ select-alternatives $(property-set) ] ;
+ if ! $(best-alternatives)
+ {
+ ECHO "error: No best alternative for" [ full-name ] ;
+ select-alternatives $(property-set) debug ;
+ return [ property-set.empty ] ;
+ }
+ else
+ {
+ # Now return virtual targets for the only alternative.
+ return [ $(best-alternatives).generate $(property-set) ] ;
+ }
+ }
+
+ rule rename ( new-name )
+ {
+ abstract-target.rename $(new-name) ;
+ for local a in $(self.alternatives)
+ {
+ $(a).rename $(new-name) ;
+ }
+ }
+}
+
+
+# Abstract target referring to a source file. This is an artificial entity
+# allowing the sources of a target to be represented using a list of abstract
+# target instances.
+#
+class file-reference : abstract-target
+{
+ import virtual-target ;
+ import property-set ;
+ import path ;
+
+ rule __init__ ( file : project )
+ {
+ abstract-target.__init__ $(file) : $(project) ;
+ }
+
+ rule generate ( properties )
+ {
+ return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
+ [ location ] : $(self.project) ] ;
+ }
+
+ # Returns true if the referred file really exists.
+ rule exists ( )
+ {
+ location ;
+ return $(self.file-path) ;
+ }
+
+ # Returns the location of target. Needed by 'testing.jam'.
+ rule location ( )
+ {
+ if ! $(self.file-location)
+ {
+ local source-location = [ $(self.project).get source-location ] ;
+ for local src-dir in $(source-location)
+ {
+ if ! $(self.file-location)
+ {
+ local location = [ path.root $(self.name) $(src-dir) ] ;
+ if [ CHECK_IF_FILE [ path.native $(location) ] ]
+ {
+ self.file-location = $(src-dir) ;
+ self.file-path = $(location) ;
+ }
+ }
+ }
+ }
+ return $(self.file-location) ;
+ }
+}
+
+
+# Given a target-reference, made in context of 'project', returns the
+# abstract-target instance that is referred to, as well as properties explicitly
+# specified for this reference.
+#
+rule resolve-reference ( target-reference : project )
+{
+ # Separate target name from properties override.
+ local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ;
+ local id = $(split[1]) ;
+ local sproperties = ;
+ if $(split[3])
+ {
+ sproperties = [ property.make [ feature.split $(split[3]) ] ] ;
+ sproperties = [ feature.expand-composites $(sproperties) ] ;
+ }
+
+ # Find the target.
+ local target = [ $(project).find $(id) ] ;
+
+ return $(target) [ property-set.create $(sproperties) ] ;
+}
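+
+# Illustrative sketch (the names "lib1" and "some-project" are hypothetical):
+#
+#   [ resolve-reference lib1/<link>static : $(some-project) ]
+#   # -> the abstract target named "lib1" in that project, plus a property-set
+#   #    containing <link>static; a plain "lib1" reference yields an empty
+#   #    property-set.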
+
+
+# Attempts to generate the target given by target reference, which can refer
+# both to a main target or to a file. Returns a list consisting of
+# - usage requirements
+# - generated virtual targets, if any
+#
+rule generate-from-reference (
+ target-reference # Target reference.
+ : project # Project where the reference is made.
+ : property-set # Properties of the main target that makes the reference.
+)
+{
+ local r = [ resolve-reference $(target-reference) : $(project) ] ;
+ local target = $(r[1]) ;
+ local sproperties = $(r[2]) ;
+
+ # Take properties which should be propagated and refine them with
+ # source-specific requirements.
+ local propagated = [ $(property-set).propagated ] ;
+ local rproperties = [ $(propagated).refine $(sproperties) ] ;
+ if $(rproperties[1]) = "@error"
+ {
+ errors.error
+ "When building" [ full-name ] " with properties " $(properties) :
+ "Invalid properties specified for " $(source) ":"
+ $(rproperties[2-]) ;
+ }
+ return [ $(target).generate $(rproperties) ] ;
+}
+
+rule apply-default-build ( property-set : default-build )
+{
+ # 1. First, see what properties from default-build are already present
+ # in property-set.
+
+ local raw = [ $(property-set).raw ] ;
+ local specified-features = $(raw:G) ;
+
+ local defaults-to-apply ;
+ for local d in [ $(default-build).raw ]
+ {
+ if ! $(d:G) in $(specified-features)
+ {
+ defaults-to-apply += $(d) ;
+ }
+ }
+
+ # 2. If there are any defaults to be applied, form a new build request.
+ # Pass it through to 'expand-no-defaults' since default-build might
+ # contain "release debug" resulting in two property-sets.
+ local result ;
+ if $(defaults-to-apply)
+ {
+ properties = [
+ build-request.expand-no-defaults
+
+ # We have to compress subproperties here to prevent property
+ # lists like:
+ #
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ #
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to a cross-product property combination. That may be an
+ # indication that build-request.expand-no-defaults is the wrong
+ # rule to use here.
+ [ feature.compress-subproperties $(raw) ]
+ $(defaults-to-apply)
+ ] ;
+
+ if $(properties)
+ {
+ for local p in $(properties)
+ {
+ result += [ property-set.create
+ [ feature.expand [ feature.split $(p) ] ] ] ;
+ }
+ }
+ else
+ {
+ result = [ property-set.empty ] ;
+ }
+ }
+ else
+ {
+ result = $(property-set) ;
+ }
+ return $(result) ;
+}
+
+
+# Given a build request and requirements, return properties common to dependency
+# build request and target requirements.
+#
+# TODO: Document exactly what 'common properties' are, whether they should
+# include default property values, whether they should contain any conditional
+# properties or should those be already processed, etc. See whether there are
+# any differences between use cases with empty and non-empty build-request as
+# well as with requirements containing and those not containing any non-free
+# features.
+#
+rule common-properties ( build-request requirements )
+{
+ # For optimization, we add free requirements directly, without using a
+ # complex algorithm. This gives the complex algorithm a better chance of
+ # caching results.
+ local free = [ $(requirements).free ] ;
+ local non-free = [ property-set.create [ $(requirements).base ]
+ [ $(requirements).incidental ] ] ;
+
+ local key = .rp.$(build-request)-$(non-free) ;
+ if ! $($(key))
+ {
+ $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
+ }
+ result = [ $($(key)).add-raw $(free) ] ;
+}
+
+
+# Given a 'context' -- a set of already present properties, and 'requirements',
+# decide which extra properties should be applied to 'context'. For conditional
+# requirements, this means evaluating the condition. For indirect conditional
+# requirements, this means calling a rule. Ordinary requirements are always
+# applied.
+#
+# Handles the situation where evaluating one conditional requirement affects
+# conditions of another conditional requirements, such as:
+# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+#
+# If 'what' is 'refined' returns context refined with new requirements. If
+# 'what' is 'added' returns just the requirements to be applied.
+#
+rule evaluate-requirements ( requirements : context : what )
+{
+ # Apply non-conditional requirements. It is possible that further
+    # conditional requirements change a value set by non-conditional
+ # requirements. For example:
+ #
+ # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
+ #
+ # I am not sure if this should be an error, or not, especially given that
+ #
+ # <threading>single
+ #
+ # might come from project's requirements.
+
+ local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] ;
+
+ local raw = [ $(context).raw ] ;
+ raw = [ property.refine $(raw) : $(unconditional) ] ;
+
+ # We have collected properties that surely must be present in common
+ # properties. We now try to figure out what other properties should be added
+ # in order to satisfy rules (4)-(6) from the docs.
+
+ local conditionals = [ $(requirements).conditional ] ;
+    # The 'count' variable has one element for each conditional feature and for
+    # each occurrence of the '<conditional>' (indirect conditional) feature. It
+    # is used as a loop counter: on each iteration of the loop we remove one
+    # element, and the property set should stabilize before the counter runs
+    # out. It is assumed that #conditionals iterations should be enough for
+    # properties to propagate along conditions in any direction.
+ local count = $(conditionals)
+ [ $(requirements).get <conditional> ]
+ and-once-more ;
+
+ local added-requirements ;
+
+ local current = $(raw) ;
+
+ # It is assumed that ordinary conditional requirements can not add
+ # <conditional> properties (a.k.a. indirect conditional properties), and
+ # that rules referred to by <conditional> properties can not add new
+ # <conditional> properties. So the list of indirect conditionals does not
+ # change.
+ local indirect = [ $(requirements).get <conditional> ] ;
+ indirect = [ MATCH ^@(.*) : $(indirect) ] ;
+
+ local ok ;
+ while $(count)
+ {
+ # Evaluate conditionals in context of current properties.
+ local e = [ property.evaluate-conditionals-in-context $(conditionals)
+ : $(current) ] ;
+
+ # Evaluate indirect conditionals.
+ for local i in $(indirect)
+ {
+ e += [ indirect.call $(i) $(current) ] ;
+ }
+
+ if $(e) = $(added-requirements)
+ {
+ # If we got the same result, we have found the final properties.
+ count = ;
+ ok = true ;
+ }
+ else
+ {
+ # Oops, conditional evaluation results have changed. Also 'current'
+ # contains leftovers from a previous evaluation. Recompute 'current'
+ # using initial properties and conditional requirements.
+ added-requirements = $(e) ;
+ current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
+ }
+ count = $(count[2-]) ;
+ }
+ if ! $(ok)
+ {
+ errors.error "Can not evaluate conditional properties " $(conditionals) ;
+ }
+
+ if $(what) = added
+ {
+ return [ property-set.create $(unconditional) $(added-requirements) ] ;
+ }
+ else if $(what) = refined
+ {
+ return [ property-set.create $(current) ] ;
+ }
+ else
+ {
+ errors.error "Invalid value of the 'what' parameter." ;
+ }
+}
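+
+# Illustrative sketch of the fixed-point iteration above: with conditional
+# requirements
+#   <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+# and a context already containing <toolset>gcc, the first pass adds
+# <variant>release, the second pass additionally adds <define>RELEASE, and the
+# next pass produces the same result, at which point the loop stops.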
+
+
+rule common-properties2 ( build-request requirements )
+{
+    # This guarantees that default properties are present in the result, unless
+    # they are overridden by some requirement. FIXME: There is a possibility
+    # that we have added <foo>bar, which is composite and expands to <foo2>bar2,
+    # but the default value of <foo2> is not bar2, in which case it is not clear
+    # what to do.
+ #
+ build-request = [ $(build-request).add-defaults ] ;
+    # Features added by 'add-defaults' can be composite and expand to features
+    # without default values -- so they are not added yet. It could be clearer
+    # or faster to expand only newly added properties, but that is not critical.
+ build-request = [ $(build-request).expand ] ;
+
+ return [ evaluate-requirements $(requirements) : $(build-request) :
+ refined ] ;
+}
+
+rule push-target ( target )
+{
+ .targets = $(target) $(.targets) ;
+}
+
+rule pop-target ( )
+{
+ .targets = $(.targets[2-]) ;
+}
+
+# Return the metatarget that is currently being generated.
+rule current ( )
+{
+ return $(.targets[1]) ;
+}
+
+
+# Implements the most standard way of constructing main target alternative from
+# sources. Allows sources to be either file or other main target and handles
+# generation of those dependency targets.
+#
+class basic-target : abstract-target
+{
+ import build-request ;
+ import build-system ;
+ import "class" : new ;
+ import errors ;
+ import feature ;
+ import property ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import targets ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+
+ self.sources = $(sources) ;
+ if ! $(requirements) {
+ requirements = [ property-set.empty ] ;
+ }
+ self.requirements = $(requirements) ;
+ if ! $(default-build)
+ {
+ default-build = [ property-set.empty ] ;
+ }
+ self.default-build = $(default-build) ;
+ if ! $(usage-requirements)
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ self.usage-requirements = $(usage-requirements) ;
+
+ if $(sources:G)
+ {
+ errors.user-error properties found in the 'sources' parameter for
+ [ full-name ] ;
+ }
+ }
+
+ rule always ( )
+ {
+ self.always = 1 ;
+ }
+
+ # Returns the list of abstract-targets which are used as sources. The extra
+ # properties specified for sources are not represented. The only user for
+ # this rule at the moment is the "--dump-tests" feature of the test system.
+ #
+ rule sources ( )
+ {
+ if ! $(self.source-targets)
+ {
+ for local s in $(self.sources)
+ {
+ self.source-targets +=
+ [ targets.resolve-reference $(s) : $(self.project) ] ;
+ }
+ }
+ return $(self.source-targets) ;
+ }
+
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule default-build ( )
+ {
+ return $(self.default-build) ;
+ }
+
+ # Returns the alternative condition for this alternative, if the condition
+ # is satisfied by 'property-set'.
+ #
+ rule match ( property-set debug ? )
+ {
+ # The condition is composed of all base non-conditional properties. It
+ # is not clear if we should expand 'self.requirements' or not. For one
+ # thing, it would be nice to be able to put
+ # <toolset>msvc-6.0
+ # in requirements. On the other hand, if we have <variant>release as a
+ # condition it does not make sense to require <optimization>full to be
+ # in the build request just to select this variant.
+ local bcondition = [ $(self.requirements).base ] ;
+ local ccondition = [ $(self.requirements).conditional ] ;
+ local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
+ if $(debug)
+ {
+ ECHO " next alternative: required properties:" $(condition:E=(empty)) ;
+ }
+
+ if $(condition) in [ $(property-set).raw ]
+ {
+ if $(debug)
+ {
+ ECHO " matched" ;
+ }
+ return $(condition) ;
+ }
+ else
+ {
+ if $(debug)
+ {
+ ECHO " not matched" ;
+ }
+ return no-match ;
+ }
+ }
+
+ # Takes a target reference, which might be either target id or a dependency
+ # property, and generates that target using 'property-set' as build request.
+ #
+ # The results are added to the variable called 'result-var'. Usage
+ # requirements are added to the variable called 'usage-requirements-var'.
+ #
+ rule generate-dependencies ( dependencies * : property-set
+ : result-var usage-requirements-var )
+ {
+ for local dependency in $(dependencies)
+ {
+ local grist = $(dependency:G) ;
+ local id = $(dependency:G=) ;
+
+ local result = [ targets.generate-from-reference $(id) :
+ $(self.project) : $(property-set) ] ;
+
+ $(result-var) += $(result[2-]:G=$(grist)) ;
+ $(usage-requirements-var) += [ $(result[1]).raw ] ;
+ }
+ }
+
+ # Determines final build properties, generates sources, and calls
+ # 'construct'. This method should not be overridden.
+ #
+ rule generate ( property-set )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ local fn = [ full-name ] ;
+ ECHO [ targets.indent ] "Building target '$(fn)'" ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] "Build request: " $(property-set) [ $(property-set).raw ] ;
+ local cf = [ build-system.command-line-free-features ] ;
+ ECHO [ targets.indent ] "Command line free features: " [ $(cf).raw ] ;
+ ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ;
+ }
+ targets.push-target $(__name__) ;
+
+ if ! $(self.generated.$(property-set))
+ {
+            # Apply free features from the command line. If the user said
+            #   define=FOO
+            # they most likely want this define to be set for all compiles.
+ property-set = [ $(property-set).refine
+ [ build-system.command-line-free-features ] ] ;
+ local rproperties = [ targets.common-properties $(property-set)
+ $(self.requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Common properties: " [ $(rproperties).raw ] ;
+ }
+
+ if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get
+ <build> ] != no )
+ {
+ local source-targets ;
+ local properties = [ $(rproperties).non-dependency ] ;
+ local usage-requirements ;
+
+ generate-dependencies [ $(rproperties).dependency ] :
+ $(rproperties) : properties usage-requirements ;
+
+ generate-dependencies $(self.sources) : $(rproperties) :
+ source-targets usage-requirements ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Usage requirements for"
+ $(self.name)": " $(usage-requirements) ;
+ }
+
+ rproperties = [ property-set.create $(properties)
+ $(usage-requirements) ] ;
+ usage-requirements = [ property-set.create $(usage-requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Build properties: "
+ [ $(rproperties).raw ] ;
+ }
+
+ local extra = [ $(rproperties).get <source> ] ;
+ source-targets += $(extra:G=) ;
+ # We might get duplicate sources, for example if we link to two
+ # libraries having the same <library> usage requirement.
+                # Use a stable sort, since for some targets the order is
+                # important, e.g. the RUN_PY target needs the Python source to
+                # come first.
+ source-targets = [ sequence.unique $(source-targets) : stable ] ;
+
+ local result = [ construct $(self.name) : $(source-targets) :
+ $(rproperties) ] ;
+
+ if $(result)
+ {
+ local gur = $(result[1]) ;
+ result = $(result[2-]) ;
+
+ if $(self.always)
+ {
+ for local t in $(result)
+ {
+ $(t).always ;
+ }
+ }
+
+ local s = [ create-subvariant $(result)
+ : [ virtual-target.recent-targets ]
+ : $(property-set) : $(source-targets)
+ : $(rproperties) : $(usage-requirements) ] ;
+ virtual-target.clear-recent-targets ;
+
+ local ur = [ compute-usage-requirements $(s) ] ;
+ ur = [ $(ur).add $(gur) ] ;
+ $(s).set-usage-requirements $(ur) ;
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ }
+
+ self.generated.$(property-set) = $(ur) $(result) ;
+ }
+ }
+ else
+ {
+ if $(rproperties[1]) = "@error"
+ {
+ ECHO [ targets.indent ] "Skipping build of:" [ full-name ]
+ "cannot compute common properties" ;
+ }
+ else if [ $(rproperties).get <build> ] = no
+ {
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+ }
+ else
+ {
+ ECHO [ targets.indent ] "Skipping build of: " [ full-name ]
+ " unknown reason" ;
+ }
+
+ # We are here either because there has been an error computing
+ # properties or there is <build>no in properties. In the latter
+ # case we do not want any diagnostic. In the former case, we
+ # need diagnostics. FIXME
+
+ # If this target fails to build, add <build>no to properties to
+ # cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it is not clear if that is a good idea anyway. The alias
+ # target, for example, should not fail to build if a
+ # dependency fails.
+ self.generated.$(property-set) = [ property-set.create <build>no ] ;
+ }
+ }
+ else
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Already built" ;
+ local ur = $(self.generated.$(property-set)) ;
+            ur = $(ur[1]) ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ targets.decrease-indent ;
+ }
+ }
+
+ targets.pop-target ;
+ targets.decrease-indent ;
+ return $(self.generated.$(property-set)) ;
+ }
+
+ # Given the set of generated targets, and refined build properties,
+ # determines and sets appropriate usage requirements on those targets.
+ #
+ rule compute-usage-requirements ( subvariant )
+ {
+ local rproperties = [ $(subvariant).build-properties ] ;
+ xusage-requirements = [ targets.evaluate-requirements
+ $(self.usage-requirements) : $(rproperties) : added ] ;
+
+ # We generate all dependency properties and add them, as well as their
+ # usage requirements, to the result.
+ local extra ;
+ generate-dependencies [ $(xusage-requirements).dependency ] :
+ $(rproperties) : extra extra ;
+
+ local result = [ property-set.create
+ [ $(xusage-requirements).non-dependency ] $(extra) ] ;
+
+ # Propagate usage requirements we got from sources, except for the
+ # <pch-header> and <pch-file> features.
+ #
+ # That feature specifies which pch file to use, and should apply only to
+ # direct dependents. Consider:
+ #
+ # pch pch1 : ...
+ # lib lib1 : ..... pch1 ;
+ # pch pch2 :
+ # lib lib2 : pch2 lib1 ;
+ #
+ # Here, lib2 should not get <pch-header> property from pch1.
+ #
+ # Essentially, when those two features are in usage requirements, they
+ # are propagated only to direct dependents. We might need a more general
+ # mechanism, but for now, only those two features are special.
+ #
+ # TODO - Actually there are more possible candidates like for instance
+ # when listing static library X as a source for another static library.
+ # Then static library X will be added as a <source> property to the
+ # second library's usage requirements but those requirements should last
+ # only up to the first executable or shared library that actually links
+ # to it.
+ local raw = [ $(subvariant).sources-usage-requirements ] ;
+ raw = [ $(raw).raw ] ;
+ raw = [ property.change $(raw) : <pch-header> ] ;
+ raw = [ property.change $(raw) : <pch-file> ] ;
+ return [ $(result).add [ property-set.create $(raw) ] ] ;
+ }
+
+ # Creates new subvariant instances for 'targets'.
+ # 'root-targets' - virtual targets to be returned to dependants
+ # 'all-targets' - virtual targets created while building this main target
+ # 'build-request' - property-set instance with requested build properties
+ #
+ local rule create-subvariant ( root-targets * : all-targets * :
+ build-request : sources * : rproperties : usage-requirements )
+ {
+ for local e in $(root-targets)
+ {
+ $(e).root true ;
+ }
+
+ # Process all virtual targets that will be created if this main target
+ # is created.
+ local s = [ new subvariant $(__name__) : $(build-request) : $(sources) :
+ $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
+ for local v in $(all-targets)
+ {
+ if ! [ $(v).creating-subvariant ]
+ {
+ $(v).creating-subvariant $(s) ;
+ }
+ }
+ return $(s) ;
+ }
+
+ # Constructs virtual targets for this abstract target and the dependency
+ # graph. Returns a usage-requirements property-set and a list of virtual
+# targets. Should be overridden in derived classes.
+ #
+ rule construct ( name : source-targets * : properties * )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+class typed-target : basic-target
+{
+ import generators ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ self.type = $(type) ;
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) $(name:S=) : $(self.type)
+ : [ property-set.create [ $(property-set).raw ]
+ <main-target-type>$(self.type) ]
+ : $(source-targets) : true ] ;
+ if ! $(r)
+ {
+ ECHO "warn: Unable to construct" [ full-name ] ;
+
+ # Are there any top-level generators for this type/property set.
+ if ! [ generators.find-viable-generators $(self.type)
+ : $(property-set) ]
+ {
+ ECHO "error: no generators were found for type '$(self.type)'" ;
+ ECHO "error: and the requested properties" ;
+ ECHO "error: make sure you've configured the needed tools" ;
+ ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+ ECHO "To debug this problem, try the --debug-generators option." ;
+ EXIT ;
+ }
+ }
+ return $(r) ;
+ }
+}
+
+
+# Returns the list of sources to use when the main target rule is invoked with
+# 'sources'. If there are any objects in 'sources', they are treated as main
+# target instances, and the names of such targets are adjusted to be
+# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled
+# if a non-empty value is passed as the 'no-renaming' parameter.
+#
+rule main-target-sources ( sources * : main-target-name : no-renaming ? )
+{
+ local result ;
+ for local t in $(sources)
+ {
+ if [ class.is-instance $(t) ]
+ {
+ local name = [ $(t).name ] ;
+ if ! $(no-renaming)
+ {
+ name = $(main-target-name)__$(name) ;
+ $(t).rename $(name) ;
+ }
+ # Inline targets are not built by default.
+ local p = [ $(t).project ] ;
+ $(p).mark-target-as-explicit $(name) ;
+ result += $(name) ;
+ }
+ else
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
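+
+# Usage sketch (illustration added in this write-up, not part of the upstream
+# file): for an inline target such as
+#
+#   exe app : [ obj helpers : helpers.cpp ] ;
+#
+# the rule above renames the inline 'obj' target to "app__helpers" (unless a
+# non-empty 'no-renaming' is passed) and marks it explicit in its project, so
+# it is only built as a source of 'app'.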
+
+
+# Returns the requirements to use when declaring a main target, obtained by
+# translating all specified property paths and refining project requirements
+# with the ones specified for the target.
+#
+rule main-target-requirements (
+ specification * # Properties explicitly specified for the main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ specification += [ toolset.requirements ] ;
+
+ local requirements = [ property-set.refine-from-user-input
+ [ $(project).get requirements ] : $(specification) :
+ [ $(project).project-module ] : [ $(project).get location ] ] ;
+ if $(requirements[1]) = "@error"
+ {
+ errors.error "Conflicting requirements for target:" $(requirements) ;
+ }
+ return $(requirements) ;
+}
+
+
+# Returns the usage requirements to use when declaring a main target, which are
+# obtained by translating all specified property paths and adding project's
+# usage requirements.
+#
+rule main-target-usage-requirements (
+ specification * # Use-properties explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local project-usage-requirements = [ $(project).get usage-requirements ] ;
+
+ # We do not use 'refine-from-user-input' because:
+ # - I am not sure if removing parent's usage requirements makes sense
+ # - refining usage requirements is not needed, since usage requirements are
+ # always free.
+ local usage-requirements = [ property-set.create-from-user-input
+ $(specification)
+ : [ $(project).project-module ] [ $(project).get location ] ] ;
+
+ return [ $(project-usage-requirements).add $(usage-requirements) ] ;
+}
+
+
+# Returns the default build value to use when declaring a main target: the
+# specified value if it is not empty, and the project's default-build attribute
+# otherwise.
+#
+rule main-target-default-build (
+ specification * # Default build explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local result ;
+ if $(specification)
+ {
+ result = $(specification) ;
+ }
+ else
+ {
+ result = [ $(project).get default-build ] ;
+ }
+ return [ property-set.create-with-validation $(result) ] ;
+}
+
+
+# Registers the specified target as a main target alternative and returns it.
+#
+rule main-target-alternative ( target )
+{
+ local ptarget = [ $(target).project ] ;
+ $(ptarget).add-alternative $(target) ;
+ return $(target) ;
+}
+
+# Creates a new metatarget with the specified properties, using 'klass' as the
+# class. The 'name', 'sources', 'requirements', 'default-build' and
+# 'usage-requirements' are assumed to be in the form specified by the user in
+# the Jamfile corresponding to 'project'.
+#
+rule create-metatarget ( klass : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [
+ targets.main-target-alternative
+ [ new $(klass) $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
+
+# Creates a typed-target with the specified properties. The 'name', 'sources',
+# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
+# the form specified by the user in the Jamfile corresponding to 'project'.
+#
+rule create-typed-target ( type : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : $(type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
diff --git a/jam-files/boost-build/build/toolset.jam b/jam-files/boost-build/build/toolset.jam
new file mode 100644
index 000000000..f2036d999
--- /dev/null
+++ b/jam-files/boost-build/build/toolset.jam
@@ -0,0 +1,502 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for toolset definition.
+
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import path ;
+import property ;
+import regex ;
+import sequence ;
+import set ;
+
+
+.flag-no = 1 ;
+
+.ignore-requirements = ;
+
+# This is used only for testing, to make sure we do not get random extra
+# elements in paths.
+if --ignore-toolset-requirements in [ modules.peek : ARGV ]
+{
+ .ignore-requirements = 1 ;
+}
+
+
+# Initializes an additional toolset-like module. First loads the
+# 'toolset-module' and then calls its 'init' rule with the trailing arguments.
+#
+rule using ( toolset-module : * )
+{
+ import $(toolset-module) ;
+ $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+}
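+
+# Usage sketch (illustration only, not part of the upstream file): user
+# configuration files typically reach the rule above through the project-level
+# 'using' rule, e.g.
+#
+#   using gcc : 4.9 : g++-4.9 ;
+#
+# which imports the 'gcc' toolset module and forwards the remaining arguments
+# to gcc.init.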
+
+
+# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
+# converted to '<toolset>gcc/<toolset-version>3.2'.
+#
+local rule normalize-condition ( property-sets * )
+{
+ local result ;
+ for local p in $(property-sets)
+ {
+        local split = [ feature.split $(p) ] ;
+        local expanded = [ feature.expand-subfeatures $(split) ] ;
+ result += $(expanded:J=/) ;
+ }
+ return $(result) ;
+}
+
+
+# Specifies if the 'flags' rule should check that the invoking module is the
+# same as the module we are setting the flag for. 'v' can be either 'checked' or
+# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore
+# the setting that was in effect before calling this rule.
+#
+rule push-checking-for-flags-module ( v )
+{
+ .flags-module-checking = $(v) $(.flags-module-checking) ;
+}
+
+rule pop-checking-for-flags-module ( )
+{
+ .flags-module-checking = $(.flags-module-checking[2-]) ;
+}
+
+
+# Specifies the flags (variables) that must be set on targets under certain
+# conditions, described by arguments.
+#
+rule flags (
+    rule-or-module # If it contains a dot, it should be a rule name. The flags
+                   # will be applied when that rule is used to set up build
+                   # actions.
+                   #
+                   # If it does not contain a dot, it should be a module name.
+                   # The flag will be applied to all rules in that module. If
+                   # the rule's module differs from the calling module, an
+                   # error is issued.
+
+ variable-name # Variable that should be set on target.
+ condition * : # A condition when this flag should be applied. Should be a
+ # set of property sets. If one of those property sets is
+ # contained in the build properties, the flag will be used.
+ # Implied values are not allowed: "<toolset>gcc" should be
+ # used, not just "gcc". Subfeatures, like in
+ # "<toolset>gcc-3.2" are allowed. If left empty, the flag
+ # will be used unconditionally.
+ #
+                   # Property sets may use value-less properties ('<a>' vs.
+                   # '<a>value') to match absent properties. This makes it
+                   # possible to separately match:
+ #
+ # <architecture>/<address-model>64
+ # <architecture>ia64/<address-model>
+ #
+ # Where both features are optional. Without this syntax
+ # we would be forced to define "default" values.
+
+ values * : # The value to add to variable. If <feature> is specified,
+ # then the value of 'feature' will be added.
+ unchecked ? # If value 'unchecked' is passed, will not test that flags
+ # are set for the calling module.
+ : hack-hack ? # For
+ # flags rule OPTIONS <cxx-abi> : -model ansi
+ # Treat <cxx-abi> as condition
+ # FIXME: ugly hack.
+)
+{
+ local caller = [ CALLER_MODULE ] ;
+ if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
+ && [ MATCH "(Jamfile<.*)" : $(caller) ]
+ {
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+        # 'make' or 'notfile' rules. This prevents setting flags on the entire
+        # Jamfile module (the name is treated as a rule instead), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule-or-module = $(caller).$(rule-or-module) ;
+ }
+ else
+ {
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ if $(unchecked) != unchecked
+ && $(.flags-module-checking[1]) != unchecked
+ && $(module_) != $(caller)
+ {
+ errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
+ }
+ }
+
+ if $(condition) && ! $(condition:G=) && ! $(hack-hack)
+ {
+ # We have condition in the form '<feature>', that is, without value.
+ # That is an older syntax:
+ # flags gcc.link RPATH <dll-path> ;
+ # for compatibility, convert it to
+ # flags gcc.link RPATH : <dll-path> ;
+ values = $(condition) ;
+ condition = ;
+ }
+
+ if $(condition)
+ {
+ property.validate-property-sets $(condition) ;
+ condition = [ normalize-condition $(condition) ] ;
+ }
+
+ add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
+}
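+
+# Usage sketch (illustration only, not part of the upstream file): assuming a
+# hypothetical toolset module 'mycc' with a 'mycc.compile' action, the rule
+# above would typically be invoked from that module as:
+#
+#   flags mycc.compile OPTIONS <optimization>speed : -O3 ;
+#   flags mycc.compile DEFINES <threading>multi : MY_THREADS ;
+#   flags mycc.compile INCLUDES <include> ;
+#
+# The first two set OPTIONS/DEFINES on targets built via mycc.compile whenever
+# the build properties contain the given condition; the last one uses the older
+# value-less syntax handled above, adding the value of every <include> property
+# to INCLUDES unconditionally.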
+
+
+# Adds a new flag setting with the specified values. Does no checking.
+#
+local rule add-flag ( rule-or-module : variable-name : condition * : values * )
+{
+ .$(rule-or-module).flags += $(.flag-no) ;
+
+ # Store all flags for a module.
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ .module-flags.$(module_) += $(.flag-no) ;
+ # Store flag-no -> rule-or-module mapping.
+ .rule-or-module.$(.flag-no) = $(rule-or-module) ;
+
+ .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
+ .$(rule-or-module).values.$(.flag-no) += $(values) ;
+ .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
+
+ .flag-no = [ numbers.increment $(.flag-no) ] ;
+}
+
+
+# Returns the first element of 'property-sets' which is a subset of
+# 'properties' or an empty list if no such element exists.
+#
+rule find-property-subset ( property-sets * : properties * )
+{
+ # Cut property values off.
+ local prop-keys = $(properties:G) ;
+
+ local result ;
+ for local s in $(property-sets)
+ {
+ if ! $(result)
+ {
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+
+ local set = [ feature.split $(s) ] ;
+
+ # Find the set of features that
+ # - have no property specified in required property set
+ # - are omitted in the build property set.
+ local default-props ;
+ for local i in $(set)
+ {
+ # If $(i) is a value-less property it should match default value
+ # of an optional property. See the first line in the example
+ # below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
+ {
+ default-props += $(i) ;
+ }
+ }
+
+ if $(set) in $(properties) $(default-props)
+ {
+ result = $(s) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a value to be added to some flag for some target based on the flag's
+# value definition and the given target's property set.
+#
+rule handle-flag-value ( value * : properties * )
+{
+ local result ;
+ if $(value:G)
+ {
+ local matches = [ property.select $(value) : $(properties) ] ;
+ for local p in $(matches)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ if dependency in $(att)
+ {
+ # The value of a dependency feature is a target and needs to be
+ # actualized.
+ result += [ $(p:G=).actualize ] ;
+ }
+ else if path in $(att) || free in $(att)
+ {
+ local values ;
+ # Treat features with && in the value specially -- each
+ # &&-separated element is considered a separate value. This is
+ # needed to handle searched libraries or include paths, which
+ # may need to be in a specific order.
+ if ! [ MATCH (&&) : $(p:G=) ]
+ {
+ values = $(p:G=) ;
+ }
+ else
+ {
+ values = [ regex.split $(p:G=) "&&" ] ;
+ }
+ if path in $(att)
+ {
+ result += [ sequence.transform path.native : $(values) ] ;
+ }
+ else
+ {
+ result += $(values) ;
+ }
+ }
+ else
+ {
+ result += $(p:G=) ;
+ }
+ }
+ }
+ else
+ {
+ result += $(value) ;
+ }
+ return $(result) ;
+}
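+
+# Illustration (added here, not part of the upstream file): given a flag value
+# that names a feature and build properties containing a &&-separated value,
+# e.g.
+#
+#   handle-flag-value <include> : <include>a&&b <define>X ;
+#
+# the rule above selects the <include> property, splits its value on "&&" and
+# returns "a" and "b" as native paths, preserving their order.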
+
+
+# Given a rule name and a property set, returns a list of interleaved variable
+# names and values which must be set on targets for that rule/property-set
+# combination.
+#
+rule set-target-variables-aux ( rule-or-module : property-set )
+{
+ local result ;
+ properties = [ $(property-set).raw ] ;
+ for local f in $(.$(rule-or-module).flags)
+ {
+ local variable = $(.$(rule-or-module).variable.$(f)) ;
+ local condition = $(.$(rule-or-module).condition.$(f)) ;
+ local values = $(.$(rule-or-module).values.$(f)) ;
+
+ if ! $(condition) ||
+ [ find-property-subset $(condition) : $(properties) ]
+ {
+ local processed ;
+ for local v in $(values)
+ {
+ # The value might be <feature-name> so needs special treatment.
+ processed += [ handle-flag-value $(v) : $(properties) ] ;
+ }
+ for local r in $(processed)
+ {
+ result += $(variable) $(r) ;
+ }
+ }
+ }
+
+    # Strip away the last dot-separated part and recurse.
+ local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule set-target-variables ( rule-or-module targets + : property-set )
+{
+ properties = [ $(property-set).raw ] ;
+ local key = $(rule-or-module).$(property-set) ;
+ local settings = $(.stv.$(key)) ;
+ if ! $(settings)
+ {
+ settings = [ set-target-variables-aux $(rule-or-module) :
+ $(property-set) ] ;
+
+ if ! $(settings)
+ {
+ settings = none ;
+ }
+ .stv.$(key) = $(settings) ;
+ }
+
+ if $(settings) != none
+ {
+ local var-name = ;
+ for local name-or-value in $(settings)
+ {
+ if $(var-name)
+ {
+ $(var-name) on $(targets) += $(name-or-value) ;
+ var-name = ;
+ }
+ else
+ {
+ var-name = $(name-or-value) ;
+ }
+ }
+ }
+}
+
+
+# Make toolset 'toolset', defined in a module of the same name, inherit from
+# 'base'.
+# 1. The 'init' rule from 'base' is imported into 'toolset' with its full name.
+#    Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. Their ids are adjusted, and the
+#    <toolset> property in their requirements is adjusted too.
+# 3. All flags are inherited.
+# 4. All rules are imported.
+#
+rule inherit ( toolset : base )
+{
+ import $(base) ;
+ inherit-generators $(toolset) : $(base) ;
+ inherit-flags $(toolset) : $(base) ;
+ inherit-rules $(toolset) : $(base) ;
+}
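+
+# Usage sketch (illustration only, not part of the upstream file): a derived
+# toolset module, say a hypothetical 'mygcc', might contain:
+#
+#   import toolset ;
+#   toolset.inherit mygcc : gcc ;
+#
+# which clones gcc's generators under mygcc.* ids, copies gcc's flag
+# definitions and imports its rules into the mygcc module.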
+
+
+rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
+{
+ properties ?= <toolset>$(toolset) ;
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ for local g in $(base-generators)
+ {
+ local id = [ $(g).id ] ;
+
+ if ! $(id) in $(generators-to-ignore)
+ {
+ # Some generator names have multiple periods in their name, so
+ # $(id:B=$(toolset)) does not generate the right new-id name. E.g.
+ # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
+ # not what we want. Manually parse the base and suffix. If there is
+ # a better way to do this, I would love to see it. See also the
+ # register() rule in the generators module.
+ local base = $(id) ;
+ local suffix = "" ;
+ while $(base:S)
+ {
+ suffix = $(base:S)$(suffix) ;
+ base = $(base:B) ;
+ }
+ local new-id = $(toolset)$(suffix) ;
+
+ generators.register [ $(g).clone $(new-id) : $(properties) ] ;
+ }
+ }
+}
+
+
+# Brings all flag definitions from the 'base' toolset into the 'toolset'
+# toolset. Flag definitions whose conditions make use of properties in
+# 'prohibited-properties' are ignored. Note the difference between a property
+# and a feature: for example <debug-symbols>on and <debug-symbols>off are
+# distinct properties, so blocking one of them does not block the other one.
+#
+# The flag conditions are not altered at all, so if a condition includes a name,
+# or version of a base toolset, it will not ever match the inheriting toolset.
+# When such flag settings must be inherited, define a rule in base toolset
+# module and call it as needed.
+#
+rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
+{
+ for local f in $(.module-flags.$(base))
+ {
+ local rule-or-module = $(.rule-or-module.$(f)) ;
+ if ( [ set.difference
+ $(.$(rule-or-module).condition.$(f)) :
+ $(prohibited-properties) ]
+ || ! $(.$(rule-or-module).condition.$(f))
+ ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
+ {
+ local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
+ local new-rule-or-module ;
+ if $(rule_)
+ {
+ new-rule-or-module = $(toolset).$(rule_) ;
+ }
+ else
+ {
+ new-rule-or-module = $(toolset) ;
+ }
+
+ add-flag
+ $(new-rule-or-module)
+ : $(.$(rule-or-module).variable.$(f))
+ : $(.$(rule-or-module).condition.$(f))
+ : $(.$(rule-or-module).values.$(f)) ;
+ }
+ }
+}
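+
+# Usage sketch (illustration only, not part of the upstream file): a derived
+# toolset that wants all of the base toolset's flags except those conditioned
+# on a particular property could call, for example:
+#
+#   toolset.inherit-flags mygcc : gcc : <address-model>64 ;
+#
+# Flag definitions whose condition consists only of the prohibited properties
+# are then skipped, while everything else is copied under the mygcc.* names.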
+
+
+rule inherit-rules ( toolset : base : localize ? )
+{
+ # It appears that "action" creates a local rule.
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ local rules ;
+ for local g in $(base-generators)
+ {
+ rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
+ }
+ rules = [ sequence.unique $(rules) ] ;
+ IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
+ IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
+}
+
+
+# Return the list of global 'toolset requirements'. Those requirements will be
+# automatically added to the requirements of any main target.
+#
+rule requirements ( )
+{
+ return $(.requirements) ;
+}
+
+
+# Adds elements to the list of global 'toolset requirements'. The requirements
+# will be automatically added to the requirements for all main targets, as if
+# they were specified literally. For best results, all requirements added should
+# be conditional or indirect conditional.
+#
+rule add-requirements ( requirements * )
+{
+ if ! $(.ignore-requirements)
+ {
+ .requirements += $(requirements) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
+ assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
+ assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
+
+ local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
+ assert.result <a>/<b> : find-property-subset $(p-set) : ;
+ assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
+ assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
+ assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
+}
diff --git a/jam-files/boost-build/build/type.jam b/jam-files/boost-build/build/type.jam
new file mode 100644
index 000000000..1a7a57823
--- /dev/null
+++ b/jam-files/boost-build/build/type.jam
@@ -0,0 +1,425 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Deals with target type declaration and defines target class which supports
+# typed targets.
+
+import "class" : new ;
+import errors ;
+import feature ;
+import generators : * ;
+import project ;
+import property ;
+import scanner ;
+import os ;
+
+# The following import would create a circular dependency:
+# project -> project-root -> builtin -> type -> targets -> project
+# import targets ;
+
+# The feature is optional so it would never get added implicitly. It is used
+# only for internal purposes and in all cases we want to use it explicitly.
+feature.feature target-type : : composite optional ;
+
+feature.feature main-target-type : : optional incidental ;
+feature.feature base-target-type : : composite optional free ;
+
+
+# Registers a target type, possibly derived from a 'base-type'. Providing a list
+# of 'suffixes' here is a shortcut for separately calling the register-suffixes
+# rule with the given suffixes and the set-generated-target-suffix rule with the
+# first given suffix.
+#
+rule register ( type : suffixes * : base-type ? )
+{
+ # Type names cannot contain hyphens, because when used as feature-values
+ # they would be interpreted as composite features which need to be
+ # decomposed.
+ switch $(type)
+ {
+ case *-* : errors.error "type name \"$(type)\" contains a hyphen" ;
+ }
+
+ if $(type) in $(.types)
+ {
+ errors.error "Type $(type) is already registered." ;
+ }
+ else
+ {
+ .types += $(type) ;
+ .base.$(type) = $(base-type) ;
+ .derived.$(base-type) += $(type) ;
+
+ if $(suffixes)-is-not-empty
+ {
+ # Specify mapping from suffixes to type.
+ register-suffixes $(suffixes) : $(type) ;
+            # By default, generated targets of 'type' will use the first of
+            # 'suffixes'. This may be overridden.
+ set-generated-target-suffix $(type) : : $(suffixes[1]) ;
+ }
+
+ feature.extend target-type : $(type) ;
+ feature.extend main-target-type : $(type) ;
+ feature.extend base-target-type : $(type) ;
+
+ feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
+ feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
+
+ # We used to declare the main target rule only when a 'main' parameter
+ # has been specified. However, it is hard to decide that a type will
+ # *never* need a main target rule and so from time to time we needed to
+ # make yet another type 'main'. So now a main target rule is defined for
+ # each type.
+ main-rule-name = [ type-to-rule-name $(type) ] ;
+ .main-target-type.$(main-rule-name) = $(type) ;
+ IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
+
+ # Adding a new derived type affects generator selection so we need to
+ # make the generator selection module update any of its cached
+ # information related to a new derived type being defined.
+ generators.update-cached-information-with-a-new-type $(type) ;
+ }
+}
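+
+# Usage sketch (illustration only, not part of the upstream file): registering
+# a hypothetical header-like type derived from the built-in H type:
+#
+#   type.register MY_HEADER : hxi : H ;
+#
+# After this call, files ending in ".hxi" are recognized as MY_HEADER,
+# generated MY_HEADER targets get the ".hxi" suffix by default, and a
+# 'my-header' main target rule becomes available.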
+
+
+# Given a type, returns the name of the main target rule which creates targets
+# of that type.
+#
+rule type-to-rule-name ( type )
+{
+ # Lowercase everything. Convert underscores to dashes.
+ import regex ;
+ local n = [ regex.split $(type:L) "_" ] ;
+ return $(n:J=-) ;
+}
+
+
+# Given a main target rule name, returns the type for which it creates targets.
+#
+rule type-from-rule-name ( rule-name )
+{
+ return $(.main-target-type.$(rule-name)) ;
+}
+
+
+# Specifies that files with a suffix from 'suffixes' are recognized as targets
+# of type 'type'. Issues an error if a different type is already specified for
+# any of the suffixes.
+#
+rule register-suffixes ( suffixes + : type )
+{
+ for local s in $(suffixes)
+ {
+ if ! $(.type.$(s))
+ {
+ .type.$(s) = $(type) ;
+ }
+ else if $(.type.$(s)) != $(type)
+ {
+ errors.error Attempting to specify multiple types for suffix
+ \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
+ }
+ }
+}
+
+
+# Returns true iff type has been registered.
+#
+rule registered ( type )
+{
+ if $(type) in $(.types)
+ {
+ return true ;
+ }
+}
+
+
+# Issues an error if 'type' is unknown.
+#
+rule validate ( type )
+{
+ if ! [ registered $(type) ]
+ {
+ errors.error "Unknown target type $(type)" ;
+ }
+}
+
+
+# Sets a scanner class that will be used for this 'type'.
+#
+rule set-scanner ( type : scanner )
+{
+ validate $(type) ;
+ .scanner.$(type) = $(scanner) ;
+}
+
+
+# Returns a scanner instance appropriate to 'type' and 'properties'.
+#
+rule get-scanner ( type : property-set )
+{
+ if $(.scanner.$(type))
+ {
+ return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
+ }
+}
+
+
+# Returns a base type for the given type or nothing in case the given type is
+# not derived.
+#
+rule base ( type )
+{
+ return $(.base.$(type)) ;
+}
+
+
+# Returns the given type and all of its base types in order of their distance
+# from type.
+#
+rule all-bases ( type )
+{
+ local result = $(type) ;
+ while $(type)
+ {
+ type = [ base $(type) ] ;
+ result += $(type) ;
+ }
+ return $(result) ;
+}
+
+
+# Returns the given type and all of its derived types in order of their distance
+# from type.
+#
+rule all-derived ( type )
+{
+ local result = $(type) ;
+ for local d in $(.derived.$(type))
+ {
+ result += [ all-derived $(d) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
+# indirect base.
+#
+rule is-derived ( type base )
+{
+ if $(base) in [ all-bases $(type) ]
+ {
+ return true ;
+ }
+}
+
+# Returns true if 'type' is either derived from or is equal to 'base'.
+#
+# TODO: It might be that is-derived and is-subtype were meant to be different
+# rules - one returning true for type = base and one not, but as currently
+# implemented they are actually the same. Clean this up.
+#
+rule is-subtype ( type base )
+{
+ return [ is-derived $(type) $(base) ] ;
+}
+
+
+# Store suffixes for generated targets.
+.suffixes = [ new property-map ] ;
+
+# Store prefixes for generated targets (e.g. "lib" for library).
+.prefixes = [ new property-map ] ;
+
+
+# Sets a file suffix to be used when generating a target of 'type' with the
+# specified properties. Can be called with no properties if no suffix has
+# already been specified for the 'type'. The 'suffix' parameter can be an empty
+# string ("") to indicate that no suffix should be used.
+#
+# Note that this does not cause files with 'suffix' to be automatically
+# recognized as being of 'type'. Two different types can use the same suffix for
+# their generated files but only one type can be auto-detected for a file with
+# that suffix. The user should explicitly specify which one, using the
+# register-suffixes rule.
+#
+rule set-generated-target-suffix ( type : properties * : suffix )
+{
+ set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
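+
+# Usage sketch (illustration only, not part of the upstream file): a
+# hypothetical MY_DOC type could register a default suffix and a
+# property-conditional one:
+#
+#   type.set-generated-target-suffix MY_DOC : : doc ;
+#   type.set-generated-target-suffix MY_DOC : <target-os>windows : wdoc ;
+#
+# Generated MY_DOC targets would then get the ".wdoc" suffix when
+# <target-os>windows is in the build properties and ".doc" otherwise.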
+
+
+# Change the suffix previously registered for this type/properties combination.
+# If suffix is not yet specified, sets it.
+#
+rule change-generated-target-suffix ( type : properties * : suffix )
+{
+ change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
+
+# Returns the suffix used when generating a file of 'type' with the given
+# properties.
+#
+rule generated-target-suffix ( type : property-set )
+{
+ return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
+}
+
+
+# Sets a target prefix that should be used when generating targets of 'type'
+# with the specified properties. Can be called with empty properties if no
+# prefix for 'type' has been specified yet.
+#
+# The 'prefix' parameter can be empty string ("") to indicate that no prefix
+# should be used.
+#
+# Usage example: library names use the "lib" prefix on unix.
+#
+rule set-generated-target-prefix ( type : properties * : prefix )
+{
+ set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+# Change the prefix previously registered for this type/properties combination.
+# If prefix is not yet specified, sets it.
+#
+rule change-generated-target-prefix ( type : properties * : prefix )
+{
+ change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+rule generated-target-prefix ( type : property-set )
+{
+ return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
+}
+
+
+# Common rules for prefix/suffix provisioning follow.
+
+local rule set-generated-target-ps ( ps : type : properties * : psval )
+{
+ properties = <target-type>$(type) $(properties) ;
+ $(.$(ps)es).insert $(properties) : $(psval) ;
+}
+
+
+local rule change-generated-target-ps ( ps : type : properties * : psval )
+{
+ properties = <target-type>$(type) $(properties) ;
+ local prev = [ $(.$(ps)es).find-replace $(properties) : $(psval) ] ;
+ if ! $(prev)
+ {
+ set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
+ }
+}
+
+
+# Returns either prefix or suffix (as indicated by 'ps') that should be used
+# when generating a target of 'type' with the specified properties. Parameter
+# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
+# 'type', returns prefix/suffix for base type, if any.
+#
+local rule generated-target-ps-real ( ps : type : properties * )
+{
+ local result ;
+ local found ;
+ while $(type) && ! $(found)
+ {
+ result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ;
+ # If the prefix/suffix is explicitly set to an empty string, we consider
+ # prefix/suffix to be found. If we were not to compare with "", there
+ # would be no way to specify an empty prefix/suffix.
+ if $(result)-is-not-empty
+ {
+ found = true ;
+ }
+ type = $(.base.$(type)) ;
+ }
+ if $(result) = ""
+ {
+ result = ;
+ }
+ return $(result) ;
+}
+
+
+local rule generated-target-ps ( ps : type : property-set )
+{
+ local key = .$(ps).$(type).$(property-set) ;
+ local v = $($(key)) ;
+ if ! $(v)
+ {
+ v = [ generated-target-ps-real $(ps) : $(type) : [ $(property-set).raw ]
+ ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $(v) != none
+ {
+ return $(v) ;
+ }
+}
+
+
+# Returns the file type given its name. If there are several dots in the file
+# name, each suffix is tried, e.g. for the name "file.so.1.2" the suffixes "2",
+# "1", and "so" will be tried.
+#
+rule type ( filename )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ filename = $(filename:L) ;
+ }
+ local type ;
+ while ! $(type) && $(filename:S)
+ {
+ local suffix = $(filename:S) ;
+ type = $(.type$(suffix)) ;
+ filename = $(filename:S=) ;
+ }
+ return $(type) ;
+}
+
+
+# Rule used to construct all main targets. Note that this rule gets imported
+# into the global namespace under different alias names and the exact target
+# type to construct is selected based on the alias used to actually invoke this
+# rule.
+#
+rule main-target-rule ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ # First discover the required target type based on the exact alias used to
+ # invoke this rule.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = $(bt[4]) ;
+ local target-type = [ type-from-rule-name $(rulename) ] ;
+
+ # This is a circular module dependency and so must be imported here.
+ import targets ;
+
+ return [ targets.create-typed-target $(target-type) : [ project.current ] :
+ $(name) : $(sources) : $(requirements) : $(default-build) :
+ $(usage-requirements) ] ;
+}
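+
+# Illustration (added here, not part of the upstream file): once a type such as
+# EXE is registered, its generated alias ends up invoking the rule above, so a
+# Jamfile declaration like
+#
+#   exe hello : hello.cpp ;
+#
+# resolves $(rulename) to "exe" via BACKTRACE, maps it back to the EXE type
+# with type-from-rule-name and creates a typed-target for the current project.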
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ # TODO: Add tests for all the is-derived, is-base & related type relation
+ # checking rules.
+}
diff --git a/jam-files/boost-build/build/version.jam b/jam-files/boost-build/build/version.jam
new file mode 100644
index 000000000..7626ddda8
--- /dev/null
+++ b/jam-files/boost-build/build/version.jam
@@ -0,0 +1,161 @@
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import numbers ;
+
+major = "2011" ;
+minor = "04" ;
+
+rule boost-build ( )
+{
+ return "$(major).$(minor)-svn" ;
+}
+
+rule print ( )
+{
+ if [ verify-engine-version ]
+ {
+ ECHO "Boost.Build" [ boost-build ] ;
+ }
+}
+
+rule verify-engine-version ( )
+{
+ local v = [ modules.peek : JAM_VERSION ] ;
+
+ if $(v[1]) != $(major) || $(v[2]) != $(minor)
+ {
+ local argv = [ modules.peek : ARGV ] ;
+ local e = $(argv[1]) ;
+ local l = [ modules.binding version ] ;
+ l = $(l:D) ;
+ l = $(l:D) ;
+ ECHO "warning: mismatched versions of Boost.Build engine and core" ;
+ ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ;
+ ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ;
+ }
+ else
+ {
+ return true ;
+ }
+}
+
+
+
+# Utility rule for testing whether all elements in a sequence are equal to 0.
+#
+local rule is-all-zeroes ( sequence * )
+{
+ local result = "true" ;
+ for local e in $(sequence)
+ {
+ if $(e) != "0"
+ {
+ result = "" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns "true" if the first version is less than the second one.
+#
+rule version-less ( lhs + : rhs + )
+{
+ numbers.check $(lhs) ;
+ numbers.check $(rhs) ;
+
+ local done ;
+ local result ;
+
+ while ! $(done) && $(lhs) && $(rhs)
+ {
+ if [ numbers.less $(lhs[1]) $(rhs[1]) ]
+ {
+ done = "true" ;
+ result = "true" ;
+ }
+ else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
+ {
+ done = "true" ;
+ }
+ else
+ {
+ lhs = $(lhs[2-]) ;
+ rhs = $(rhs[2-]) ;
+ }
+ }
+ if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
+ {
+ result = "true" ;
+ }
+
+ return $(result) ;
+}
+
+
+# Returns "true" if the current JAM version version is at least the given
+# version.
+#
+rule check-jam-version ( version + )
+{
+ local version-tag = $(version:J=.) ;
+ if ! $(version-tag)
+ {
+ errors.error Invalid version specifier: : $(version:E="(undefined)") ;
+ }
+
+ if ! $(.jam-version-check.$(version-tag))-is-not-empty
+ {
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ if ! $(jam-version)
+ {
+ errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
+ "installation is most likely terribly outdated." ;
+ }
+ .jam-version-check.$(version-tag) = "true" ;
+ if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
+ {
+ .jam-version-check.$(version-tag) = "" ;
+ }
+ }
+ return $(.jam-version-check.$(version-tag)) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ local future-version = $(jam-version) ;
+ future-version += "1" ;
+
+ assert.true check-jam-version $(jam-version) ;
+ assert.false check-jam-version $(future-version) ;
+
+ assert.true version-less 0 : 1 ;
+ assert.false version-less 0 : 0 ;
+ assert.true version-less 1 : 2 ;
+ assert.false version-less 1 : 1 ;
+ assert.false version-less 2 : 1 ;
+ assert.true version-less 3 1 20 : 3 4 10 ;
+ assert.false version-less 3 1 10 : 3 1 10 ;
+ assert.false version-less 3 4 10 : 3 1 20 ;
+ assert.true version-less 3 1 20 5 1 : 3 4 10 ;
+ assert.false version-less 3 1 10 5 1 : 3 1 10 ;
+ assert.false version-less 3 4 10 5 1 : 3 1 20 ;
+ assert.true version-less 3 1 20 : 3 4 10 5 1 ;
+ assert.true version-less 3 1 10 : 3 1 10 5 1 ;
+ assert.false version-less 3 4 10 : 3 1 20 5 1 ;
+ assert.false version-less 3 1 10 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 0 : 3 1 10 ;
+ assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
+ assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
+
+ # TODO: Add tests for invalid input data being sent to version-less.
+}
diff --git a/jam-files/boost-build/build/virtual-target.jam b/jam-files/boost-build/build/virtual-target.jam
new file mode 100644
index 000000000..2e8446bcc
--- /dev/null
+++ b/jam-files/boost-build/build/virtual-target.jam
@@ -0,0 +1,1317 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements virtual targets, which correspond to actual files created during a
+# build, but are not yet targets in the Jam sense. They are needed, for example,
+# when searching for possible transformation sequences, when it is not yet known
+# whether a particular target should be created at all.
+
+import "class" : new ;
+import errors ;
+import path ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+
+
+# +--------------------------+
+# | virtual-target |
+# +==========================+
+# | actualize |
+# +--------------------------+
+# | actualize-action() = 0 |
+# | actualize-location() = 0 |
+# +----------------+---------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# +---------------------+ +-------+--------------+
+# | action | | abstract-file-target |
+# +=====================| * +======================+
+# | action-name | +--+ action |
+# | properties | | +----------------------+
+# +---------------------+--+ | actualize-action() |
+# | actualize() |0..1 +-----------+----------+
+# | path() | |
+# | adjust-properties() | sources |
+# | actualize-sources() | targets |
+# +------+--------------+ ^
+# | / \
+# ^ +-+-+
+# / \ |
+# +-+-+ +-------------+-------------+
+# | | |
+# | +------+---------------+ +--------+-------------+
+# | | file-target | | searched-lib-target |
+# | +======================+ +======================+
+# | | actualize-location() | | actualize-location() |
+# | +----------------------+ +----------------------+
+# |
+# +-+------------------------------+
+# | |
+# +----+----------------+ +---------+-----------+
+# | compile-action | | link-action |
+# +=====================+ +=====================+
+# | adjust-properties() | | adjust-properties() |
+# +---------------------+ | actualize-sources() |
+# +---------------------+
+#
+# The 'compile-action' and 'link-action' classes are not defined here but in
+# builtin.jam modules. They are shown in the diagram to give the big picture.
+
+
+# Models a potential target. It can be converted into a Jam target and used in
+# building, if needed. However, it can also be dropped, which allows us to
+# search for different transformations and select only one.
+#
+class virtual-target
+{
+ import scanner ;
+ import sequence ;
+ import utility ;
+ import virtual-target ;
+
+ rule __init__ (
+ name # Target/project name.
+ : project # Project to which this target belongs.
+ )
+ {
+ self.name = $(name) ;
+ self.project = $(project) ;
+ self.dependencies = ;
+ }
+
+ # Name of this target.
+ #
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Project of this target.
+ #
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Adds additional 'virtual-target' instances this one depends on.
+ #
+ rule depends ( d + )
+ {
+ self.dependencies = [ sequence.merge $(self.dependencies) :
+ [ sequence.insertion-sort $(d) ] ] ;
+ }
+
+ rule dependencies ( )
+ {
+ return $(self.dependencies) ;
+ }
+
+ rule always ( )
+ {
+ .always = 1 ;
+ }
+
+ # Generates all the actual targets and sets up build actions for this
+ # target.
+ #
+ # If 'scanner' is specified, creates an additional target with the same
+ # location as the actual target, which will depend on the actual target and
+ # be associated with a 'scanner'. That additional target is returned. See
+ # the docs (#dependency_scanning) for rationale. Target must correspond to a
+ # file if 'scanner' is specified.
+ #
+ # If scanner is not specified then the actual target is returned.
+ #
+ rule actualize ( scanner ? )
+ {
+ local actual-name = [ actualize-no-scanner ] ;
+
+ if $(.always)
+ {
+ ALWAYS $(actual-name) ;
+ }
+
+ if ! $(scanner)
+ {
+ return $(actual-name) ;
+ }
+ else
+ {
+ # Add the scanner instance to the grist for name.
+ local g = [ sequence.join
+ [ utility.ungrist $(actual-name:G) ] $(scanner) : - ] ;
+ local name = $(actual-name:G=$(g)) ;
+
+ if ! $(self.made.$(name))
+ {
+ self.made.$(name) = true ;
+
+ DEPENDS $(name) : $(actual-name) ;
+
+ actualize-location $(name) ;
+
+ scanner.install $(scanner) : $(name) $(__name__) ;
+ }
+ return $(name) ;
+ }
+ }
+
+# private: (overridables)
+
+ # Sets up build actions for 'target'. Should call appropriate rules and set
+ # target variables.
+ #
+ rule actualize-action ( target )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Sets up variables on 'target' which specify its location.
+ #
+ rule actualize-location ( target )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # If the target is a generated one, returns the path where it will be
+ # generated. Otherwise, returns an empty list.
+ #
+ rule path ( )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Returns the actual target name to be used in case when no scanner is
+ # involved.
+ #
+ rule actual-name ( )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+# implementation
+ rule actualize-no-scanner ( )
+ {
+ # In fact, we just need to merge virtual-target with
+ # abstract-file-target as the latter is the only class derived from the
+ # former. But that has been left for later.
+
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+# Target corresponding to a file. The exact mapping for file is not yet
+# specified in this class. (TODO: Actually, the class name could be better...)
+#
+# May be a source file (when no action is specified) or a derived file
+# (otherwise).
+#
+# The target's grist is a concatenation of its project's location, action
+# properties (for derived targets) and, optionally, a value identifying the main
+# target.
+#
+class abstract-file-target : virtual-target
+{
+ import project ;
+ import regex ;
+ import sequence ;
+ import path ;
+ import type ;
+ import property-set ;
+ import indirect ;
+
+ rule __init__ (
+ name # Target's name.
+        exact ? # If non-empty, the name is exactly the name the created file
+ # should have. Otherwise, the '__init__' method will add a
+ # suffix obtained from 'type' by calling
+ # 'type.generated-target-suffix'.
+ : type ? # Target's type.
+ : project
+ : action ?
+ )
+ {
+ virtual-target.__init__ $(name) : $(project) ;
+
+ self.type = $(type) ;
+ self.action = $(action) ;
+ if $(action)
+ {
+ $(action).add-targets $(__name__) ;
+
+ if $(self.type) && ! $(exact)
+ {
+ _adjust-name $(name) ;
+ }
+ }
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ # Sets the path. When generating target name, it will override any path
+ # computation from properties.
+ #
+ rule set-path ( path )
+ {
+ self.path = [ path.native $(path) ] ;
+ }
+
+ # Returns the currently set action.
+ #
+ rule action ( )
+ {
+ return $(self.action) ;
+ }
+
+    # Sets/gets the 'root' flag. A target is a root if it directly corresponds
+    # to some variant of a main target.
+ #
+ rule root ( set ? )
+ {
+ if $(set)
+ {
+ self.root = true ;
+ }
+ return $(self.root) ;
+ }
+
+ # Gets or sets the subvariant which created this target. Subvariant is set
+    # when the target is brought into existence and is never changed after
+    # that. In particular, if a target is shared by several subvariants, only
+    # the first one is stored.
+ #
+ rule creating-subvariant ( s ? # If specified, specifies the value to set,
+ # which should be a 'subvariant' class
+ # instance.
+ )
+ {
+ if $(s) && ! $(self.creating-subvariant)
+ {
+ self.creating-subvariant = $(s) ;
+ }
+ return $(self.creating-subvariant) ;
+ }
+
+ rule actualize-action ( target )
+ {
+ if $(self.action)
+ {
+ $(self.action).actualize ;
+ }
+ }
+
+ # Return a human-readable representation of this target. If this target has
+ # an action, that is:
+ #
+ # { <action-name>-<self.name>.<self.type> <action-sources>... }
+ #
+ # otherwise, it is:
+ #
+ # { <self.name>.<self.type> }
+ #
+ rule str ( )
+ {
+ local action = [ action ] ;
+ local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
+
+ if $(action)
+ {
+ local sources = [ $(action).sources ] ;
+ local action-name = [ $(action).action-name ] ;
+
+ local ss ;
+ for local s in $(sources)
+ {
+ ss += [ $(s).str ] ;
+ }
+
+ return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
+ }
+ else
+ {
+ return "{" $(name-dot-type) "}" ;
+ }
+ }
+
+ rule less ( a )
+ {
+ if [ str ] < [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+ rule equal ( a )
+ {
+ if [ str ] = [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+# private:
+ rule actual-name ( )
+ {
+ if ! $(self.actual-name)
+ {
+ local grist = [ grist ] ;
+ local basename = [ path.native $(self.name) ] ;
+ self.actual-name = <$(grist)>$(basename) ;
+ }
+ return $(self.actual-name) ;
+ }
+
+ # Helper to 'actual-name', above. Computes a unique prefix used to
+ # distinguish this target from other targets with the same name creating
+ # different files.
+ #
+ rule grist ( )
+ {
+ # Depending on target, there may be different approaches to generating
+ # unique prefixes. We generate prefixes in the form:
+ # <one letter approach code> <the actual prefix>
+ local path = [ path ] ;
+ if $(path)
+ {
+ # The target will be generated to a known path. Just use the path
+ # for identification, since path is as unique as it can get.
+ return p$(path) ;
+ }
+ else
+ {
+ # File is either source, which will be searched for, or is not a
+ # file at all. Use the location of project for distinguishing.
+ local project-location = [ $(self.project).get location ] ;
+ local location-grist = [ sequence.join [ regex.split
+ $(project-location) "/" ] : "!" ] ;
+
+ if $(self.action)
+ {
+ local ps = [ $(self.action).properties ] ;
+ local property-grist = [ $(ps).as-path ] ;
+ # 'property-grist' can be empty when 'ps' is an empty property
+ # set.
+ if $(property-grist)
+ {
+ location-grist = $(location-grist)/$(property-grist) ;
+ }
+ }
+
+ return l$(location-grist) ;
+ }
+ }
+
+ # Given the target name specified in constructor, returns the name which
+ # should be really used, by looking at the <tag> properties. Tag properties
+ # need to be specified as <tag>@rule-name. This makes Boost Build call the
+ # specified rule with the target name, type and properties to get the new
+ # name. If no <tag> property is specified or the rule specified by <tag>
+ # returns nothing, returns the result of calling
+ # virtual-target.add-prefix-and-suffix.
+ #
+ rule _adjust-name ( specified-name )
+ {
+ local ps ;
+ if $(self.action)
+ {
+ ps = [ $(self.action).properties ] ;
+ }
+ else
+ {
+ ps = [ property-set.empty ] ;
+ }
+
+ # We add ourselves to the properties so that any tag rule can get more
+ # direct information about the target than just that available through
+ # the properties. This is useful in implementing name changes based on
+ # the sources of the target. For example to make unique names of object
+ # files based on the source file. --grafik
+ ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
+
+ local tag = [ $(ps).get <tag> ] ;
+
+ if $(tag)
+ {
+ local rule-name = [ MATCH ^@(.*) : $(tag) ] ;
+ if $(rule-name)
+ {
+ if $(tag[2])
+ {
+ errors.error "<tag>@rulename is present but is not the only"
+ "<tag> feature" ;
+ }
+
+ self.name = [ indirect.call $(rule-name) $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ else
+ {
+ errors.error
+ "The value of the <tag> feature must be '@rule-name'" ;
+ }
+ }
+
+ # If there is no tag or the tag rule returned nothing.
+ if ! $(tag) || ! $(self.name)
+ {
+ self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ }
+
+ rule actualize-no-scanner ( )
+ {
+ local name = [ actual-name ] ;
+
+ # Do anything only on the first invocation.
+ if ! $(self.made.$(name))
+ {
+ self.made.$(name) = true ;
+
+ if $(self.action)
+ {
+                # For non-derived targets, we do not care if there are several
+ # virtual targets that refer to the same name. One case when
+ # this is unavoidable is when the file name is main.cpp and two
+ # targets have types CPP (for compiling) and MOCCABLE_CPP (for
+ # conversion to H via Qt tools).
+ virtual-target.register-actual-name $(name) : $(__name__) ;
+ }
+
+ for local i in $(self.dependencies)
+ {
+ DEPENDS $(name) : [ $(i).actualize ] ;
+ }
+
+ actualize-location $(name) ;
+ actualize-action $(name) ;
+ }
+ return $(name) ;
+ }
+}
+
+
+# Appends the suffix appropriate to 'type/property-set' combination to the
+# specified name and returns the result.
+#
+rule add-prefix-and-suffix ( specified-name : type ? : property-set )
+{
+ local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
+
+ # Handle suffixes for which no leading dot is desired. Those are specified
+ # by enclosing them in <...>. Needed by python so it can create "_d.so"
+ # extensions, for example.
+ if $(suffix:G)
+ {
+ suffix = [ utility.ungrist $(suffix) ] ;
+ }
+ else
+ {
+ suffix = .$(suffix) ;
+ }
+
+ local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
+
+ if [ MATCH ^($(prefix)) : $(specified-name) ]
+ {
+ prefix = ;
+ }
+ return $(prefix:E="")$(specified-name)$(suffix:E="") ;
+}
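+
+# Illustration (added here, not part of the upstream file): assuming a
+# hypothetical MY_LIB type whose registered prefix is "lib" and suffix is "a"
+# for the given property set 'ps', the rule above behaves like:
+#
+#   add-prefix-and-suffix foo : MY_LIB : $(ps) ;      # -> "libfoo.a"
+#   add-prefix-and-suffix libfoo : MY_LIB : $(ps) ;   # -> "libfoo.a"
+#
+# i.e. a name that already starts with the prefix is not prefixed again, and a
+# suffix registered in angle brackets would be appended without a leading dot.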
+
+
+# File targets with explicitly known location.
+#
+# The file path is determined as
+# * Value passed to the 'set-path' method, if any.
+# * For derived files, project's build dir, joined with components that
+# describe action properties. If free properties are not equal to the
+# project's reference properties an element with the name of the main
+# target is added.
+# * For source files, project's source dir.
+#
+# The file suffix is determined as:
+# * The value passed to the 'suffix' method, if any.
+# * The suffix corresponding to the target's type.
+#
+class file-target : abstract-file-target
+{
+ import "class" : new ;
+ import common ;
+ import errors ;
+
+ rule __init__ (
+ name exact ?
+ : type ? # Optional type for this target.
+ : project
+ : action ?
+ : path ?
+ )
+ {
+ abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) :
+ $(action) ;
+
+ self.path = $(path) ;
+ }
+
+ rule clone-with-different-type ( new-type )
+ {
+ return [ new file-target $(self.name) exact : $(new-type) :
+ $(self.project) : $(self.action) : $(self.path) ] ;
+ }
+
+ rule actualize-location ( target )
+ {
+ if $(self.action)
+ {
+ # This is a derived file.
+ local path = [ path ] ;
+ LOCATE on $(target) = $(path) ;
+
+ # Make sure the path exists.
+ DEPENDS $(target) : $(path) ;
+ common.MkDir $(path) ;
+
+ # It is possible that the target name includes a directory too, for
+ # example when installing headers. Create that directory.
+ if $(target:D)
+ {
+ local d = $(target:D) ;
+ d = $(d:R=$(path)) ;
+ DEPENDS $(target) : $(d) ;
+ common.MkDir $(d) ;
+ }
+
+ # For a real file target, we create a fake target depending on the
+ # real target. This allows us to run
+ #
+ # bjam hello.o
+ #
+ # without trying to guess the name of the real target. Note that the
+ # target has no directory name and uses a special <e> grist.
+ #
+ # First, that means that "bjam hello.o" will build all known hello.o
+ # targets. Second, the <e> grist makes sure this target will not be
+            # confused with other targets. For example, if we have a subdir
+            # 'test' with a target 'test' in it that includes a 'test.o' file,
+            # then the target for the directory will be just 'test', the target
+            # for test.o will be <ptest/bin/gcc/debug>test.o, and the target we
+            # create below will be <e>test.o.
+ DEPENDS $(target:G=e) : $(target) ;
+ # Allow bjam <path-to-file>/<file> to work. This will not catch all
+ # possible ways to refer to the path (relative/absolute, extra ".",
+            # various ".."), but should help in obvious cases.
+ DEPENDS $(target:G=e:R=$(path)) : $(target) ;
+ }
+ else
+ {
+ SEARCH on $(target) = [ path.native $(self.path) ] ;
+ }
+ }
+
+ # Returns the directory for this target.
+ #
+ rule path ( )
+ {
+ if ! $(self.path)
+ {
+ if $(self.action)
+ {
+ local p = [ $(self.action).properties ] ;
+ local path,relative-to-build-dir = [ $(p).target-path ] ;
+ local path = $(path,relative-to-build-dir[1]) ;
+ local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
+
+ if $(relative-to-build-dir)
+ {
+ path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
+ }
+
+ self.path = [ path.native $(path) ] ;
+ }
+ }
+ return $(self.path) ;
+ }
+}
+
+
+class notfile-target : abstract-file-target
+{
+ rule __init__ ( name : project : action ? )
+ {
+ abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
+ }
+
+ # Returns nothing to indicate that the target's path is not known.
+ #
+ rule path ( )
+ {
+ return ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ ALWAYS $(target) ;
+ # TEMPORARY $(target) ;
+ NOUPDATE $(target) ;
+ }
+}
+
+
+# Class representing an action. Both 'targets' and 'sources' should list
+# instances of 'virtual-target'. Action name should name a rule with this
+# prototype:
+# rule action-name ( targets + : sources * : properties * )
+# Targets and sources are passed as actual Jam targets. The rule may not
+# establish additional dependency relationships.
+#
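+# A minimal usage sketch (hypothetical rule and variable names): an updating
+# rule matching the prototype above, wrapped in an action instance.
+#
+#   rule echo-action ( targets + : sources * : properties * )
+#   {
+#       ECHO "updating" $(targets) "from" $(sources) ;
+#   }
+#   local a = [ new action $(source-virtual-targets) : echo-action
+#       : $(some-property-set) ] ;
+#   $(a).add-targets $(produced-virtual-targets) ;
+#   $(a).actualize ;
+#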
+class action
+{
+ import "class" ;
+ import errors ;
+ import type ;
+ import toolset ;
+ import property-set ;
+ import indirect ;
+ import path ;
+ import set : difference ;
+
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ self.sources = $(sources) ;
+
+ self.action-name = [ indirect.make-qualified $(action-name) ] ;
+
+ if ! $(property-set)
+ {
+ property-set = [ property-set.empty ] ;
+ }
+
+ if ! [ class.is-instance $(property-set) ]
+ {
+ errors.error "Property set instance required" ;
+ }
+
+ self.properties = $(property-set) ;
+ }
+
+ rule add-targets ( targets * )
+ {
+ self.targets += $(targets) ;
+ }
+
+ rule replace-targets ( old-targets * : new-targets * )
+ {
+ self.targets = [ set.difference $(self.targets) : $(old-targets) ] ;
+ self.targets += $(new-targets) ;
+ }
+
+ rule targets ( )
+ {
+ return $(self.targets) ;
+ }
+
+ rule sources ( )
+ {
+ return $(self.sources) ;
+ }
+
+ rule action-name ( )
+ {
+ return $(self.action-name) ;
+ }
+
+ rule properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ # Generates actual build instructions.
+ #
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+
+ local ps = [ properties ] ;
+ local properties = [ adjust-properties $(ps) ] ;
+
+ local actual-targets ;
+ for local i in [ targets ]
+ {
+ actual-targets += [ $(i).actualize ] ;
+ }
+
+ actualize-sources [ sources ] : $(properties) ;
+
+ DEPENDS $(actual-targets) : $(self.actual-sources)
+ $(self.dependency-only-sources) ;
+
+ # This works around a bug with -j and actions that
+ # produce multiple targets, where:
+ # - dependency on the first output is found, and
+ # the action is started
+ # - dependency on the second output is found, and
+ # bjam noticed that command is already running
+ # - instead of waiting for the command, dependents
+ # of the second target are immediately updated.
+ if $(actual-targets[2])
+ {
+ INCLUDES $(actual-targets) : $(actual-targets) ;
+ }
+
+ # The action name can include an additional argument to the rule, which
+ # should not be passed to 'set-target-variables'.
+ toolset.set-target-variables
+ [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
+ : $(properties) ;
+
+ # Reflect ourselves in a variable for the target. This allows
+ # looking up additional info for the action given the raw target.
+ # For example to debug or output action information from action
+ # rules.
+ .action on $(actual-targets) = $(__name__) ;
+
+ indirect.call $(self.action-name) $(actual-targets)
+ : $(self.actual-sources) : [ $(properties).raw ] ;
+
+ # Since we set up the creating action here, we set up the action for
+ # cleaning up as well.
+ common.Clean clean-all : $(actual-targets) ;
+ }
+ }
+
+ # Helper for 'actualize-sources'. For each passed source, actualizes it with
+ # the appropriate scanner. Returns the actualized virtual targets.
+ #
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result = ;
+ for local i in $(sources)
+ {
+ local scanner ;
+ if [ $(i).type ]
+ {
+ scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
+ }
+ result += [ $(i).actualize $(scanner) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Creates actual Jam targets for sources. Initializes the following member
+ # variables:
+ # 'self.actual-sources' -- sources passed to the updating action.
+ # 'self.dependency-only-sources' -- sources marked as dependencies, but
+ # are not used otherwise.
+ #
+ # New values will be *appended* to the variables. They may be non-empty if
+ # the caller wants them to be.
+ #
+ rule actualize-sources ( sources * : property-set )
+ {
+ local dependencies = [ $(self.properties).get <dependency> ] ;
+
+ self.dependency-only-sources +=
+ [ actualize-source-type $(dependencies) : $(property-set) ] ;
+ self.actual-sources +=
+ [ actualize-source-type $(sources) : $(property-set) ] ;
+
+ # This is used to help bjam find dependencies in generated headers and
+ # other main targets, e.g. in:
+ #
+ # make a.h : ....... ;
+ # exe hello : hello.cpp : <implicit-dependency>a.h ;
+ #
+ # For bjam to find the dependency the generated target must be
+ # actualized (i.e. have its Jam target constructed). In the above case,
+ # if we are building just hello ("bjam hello"), 'a.h' will not be
+ # actualized unless we do it here.
+ local implicit = [ $(self.properties).get <implicit-dependency> ] ;
+ for local i in $(implicit)
+ {
+ $(i:G=).actualize ;
+ }
+ }
+
+ # Determines real properties when trying to build with 'properties'. This is
+ # the last chance to fix properties, for example to adjust includes to get
+ # generated headers correctly. Default implementation simply returns its
+ # argument.
+ #
+ rule adjust-properties ( property-set )
+ {
+ return $(property-set) ;
+ }
+}
+
+
+# Action class which does nothing --- it produces the targets with specific
+# properties out of nowhere. It is needed to distinguish virtual targets with
+# different properties that are known to exist and have no actions which create
+# them.
+#
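+# Illustrative sketch (hypothetical names): wrapping a prebuilt file in a
+# virtual target whose "creating" action does nothing.
+#
+#   local a = [ new null-action $(some-property-set) ] ;
+#   local lib = [ new file-target libfoo.a exact : LIB : $(some-project) : $(a) ] ;
+#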
+class null-action : action
+{
+ rule __init__ ( property-set ? )
+ {
+ action.__init__ : .no-action : $(property-set) ;
+ }
+
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+ for local i in [ targets ]
+ {
+ $(i).actualize ;
+ }
+ }
+ }
+}
+
+
+# Class which acts exactly like 'action', except that its sources are not
+# scanned for dependencies.
+#
+class non-scanning-action : action
+{
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ action.__init__ $(sources) : $(action-name) : $(property-set) ;
+ }
+
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result ;
+ for local i in $(sources)
+ {
+ result += [ $(i).actualize ] ;
+ }
+ return $(result) ;
+ }
+}
+
+
+# Creates a virtual target with an appropriate name and type from 'file'. If a
+# target with that name in that project already exists, returns that already
+# created target.
+#
+# FIXME: a more correct way would be to compute the path to the file, based on
+# name and source location for the project, and use that path to determine if
+# the target has already been created. This logic should be shared with how we
+# usually find targets identified by a specific target id. It should also be
+# updated to work correctly when the file is specified using both relative and
+# absolute paths.
+#
+# TODO: passing a project with all virtual targets is starting to be annoying.
+#
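+# Illustrative usage (hypothetical file, directory and project names):
+#
+#   local t = [ from-file hello.cpp : src : $(some-project) ] ;
+#   # A second call with the same file returns the same instance.
+#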
+rule from-file ( file : file-loc : project )
+{
+ import type ; # Had to do this here to break a circular dependency.
+
+ # Check whether we already created a target corresponding to this file.
+ local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
+
+ if $(.files.$(path))
+ {
+ return $(.files.$(path)) ;
+ }
+ else
+ {
+ local name = [ path.make $(file) ] ;
+ local type = [ type.type $(file) ] ;
+ local result ;
+
+ result = [ new file-target $(file) : $(type) : $(project) : :
+ $(file-loc) ] ;
+
+ .files.$(path) = $(result) ;
+ return $(result) ;
+ }
+}
+
+
+# Registers a new virtual target. Checks if there is already a registered target
+# with the same name, type, project and subvariant properties as well as the
+# same sources and equal action. If such target is found it is returned and a
+# new 'target' is not registered. Otherwise, 'target' is registered and
+# returned.
+#
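+# Illustrative sketch (hypothetical names): registering an equivalent target a
+# second time returns the originally registered instance.
+#
+#   local t1 = [ register $(some-target) ] ;
+#   local t2 = [ register $(equivalent-target) ] ;  # t2 = t1 when name, type,
+#                                                   # sources and action match
+#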
+rule register ( target )
+{
+ local signature = [ sequence.join
+ [ $(target).path ] [ $(target).name ] : - ] ;
+
+ local result ;
+ for local t in $(.cache.$(signature))
+ {
+ local a1 = [ $(t).action ] ;
+ local a2 = [ $(target).action ] ;
+
+ if ! $(result)
+ {
+ if ! $(a1) && ! $(a2)
+ {
+ result = $(t) ;
+ }
+ else
+ {
+ if $(a1) && $(a2) &&
+ ( [ $(a1).action-name ] = [ $(a2).action-name ] ) &&
+ ( [ $(a1).sources ] = [ $(a2).sources ] )
+ {
+ local ps1 = [ $(a1).properties ] ;
+ local ps2 = [ $(a2).properties ] ;
+ local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference
+ [ $(ps1).dependency ] : [ $(ps1).incidental ] ] ;
+ local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference
+ [ $(ps2).dependency ] : [ $(ps2).incidental ] ] ;
+ if $(p1) = $(p2)
+ {
+ result = $(t) ;
+ }
+ }
+ }
+ }
+ }
+
+ if ! $(result)
+ {
+ .cache.$(signature) += $(target) ;
+ result = $(target) ;
+ }
+
+ .recent-targets += $(result) ;
+ .all-targets += $(result) ;
+
+ return $(result) ;
+}
+
+
+# Each target returned by 'register' is added to the .recent-targets list,
+# returned by this function. This allows us to find all virtual targets created
+# when building a specific main target, even those constructed only as
+# intermediate targets.
+#
+rule recent-targets ( )
+{
+ return $(.recent-targets) ;
+}
+
+
+rule clear-recent-targets ( )
+{
+ .recent-targets = ;
+}
+
+
+# Returns all virtual targets ever created.
+#
+rule all-targets ( )
+{
+ return $(.all-targets) ;
+}
+
+
+# Returns all targets from 'targets' with types equal to 'type' or derived from
+# it.
+#
+rule select-by-type ( type : targets * )
+{
+ local result ;
+ for local t in $(targets)
+ {
+ if [ type.is-subtype [ $(t).type ] $(type) ]
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+rule register-actual-name ( actual-name : virtual-target )
+{
+ if $(.actual.$(actual-name))
+ {
+ local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ;
+ local cs2 = [ $(virtual-target).creating-subvariant ] ;
+ local cmt1 = [ $(cs1).main-target ] ;
+ local cmt2 = [ $(cs2).main-target ] ;
+
+ local action1 = [ $(.actual.$(actual-name)).action ] ;
+ local action2 = [ $(virtual-target).action ] ;
+ local properties-added ;
+ local properties-removed ;
+ if $(action1) && $(action2)
+ {
+ local p1 = [ $(action1).properties ] ;
+ p1 = [ $(p1).raw ] ;
+ local p2 = [ $(action2).properties ] ;
+ p2 = [ $(p2).raw ] ;
+ properties-removed = [ set.difference $(p1) : $(p2) ] ;
+ properties-removed ?= "none" ;
+ properties-added = [ set.difference $(p2) : $(p1) ] ;
+ properties-added ?= "none" ;
+ }
+ errors.error "Duplicate name of actual target:" $(actual-name)
+ : "previous virtual target" [ $(.actual.$(actual-name)).str ]
+ : "created from" [ $(cmt1).full-name ]
+ : "another virtual target" [ $(virtual-target).str ]
+ : "created from" [ $(cmt2).full-name ]
+ : "added properties:" $(properties-added)
+ : "removed properties:" $(properties-removed) ;
+ }
+ else
+ {
+ .actual.$(actual-name) = $(virtual-target) ;
+ }
+}
+
+
+# Traverses the dependency graph of 'target' and returns all targets that will be
+# created before this one is created. If the root of some dependency graph is
+# found during traversal, it is either included or not, depending on the
+# 'include-roots' value. In either case traversal stops at root targets, i.e.
+# root target sources are not traversed.
+#
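+# Illustrative sketch (assuming $(exe-target) holds a virtual target with an
+# action):
+#
+#   # All intermediate targets created before $(exe-target), including root
+#   # targets and plain sources.
+#   local deps = [ traverse $(exe-target) : true : true ] ;
+#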
+rule traverse ( target : include-roots ? : include-sources ? )
+{
+ local result ;
+ if [ $(target).action ]
+ {
+ local action = [ $(target).action ] ;
+ # This includes the 'target' as well.
+ result += [ $(action).targets ] ;
+
+ for local t in [ $(action).sources ]
+ {
+ if ! [ $(t).root ]
+ {
+ result += [ traverse $(t) : $(include-roots) : $(include-sources) ] ;
+ }
+ else if $(include-roots)
+ {
+ result += $(t) ;
+ }
+ }
+ }
+ else if $(include-sources)
+ {
+ result = $(target) ;
+ }
+ return $(result) ;
+}
+
+
+# Takes an 'action' instance and creates a new instance of it and all targets
+# produced by the action. The action name and properties are set to
+# 'new-action-name' and 'new-properties', if those are specified. Returns the
+# cloned action.
+#
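+# Illustrative sketch (hypothetical names): re-running an existing action under
+# different properties in another project.
+#
+#   local cloned = [ clone-action $(compile-action) : $(other-project)
+#       : : $(new-property-set) ] ;
+#   local cloned-objs = [ $(cloned).targets ] ;
+#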
+rule clone-action ( action : new-project : new-action-name ? : new-properties ? )
+{
+ if ! $(new-action-name)
+ {
+ new-action-name = [ $(action).action-name ] ;
+ }
+ if ! $(new-properties)
+ {
+ new-properties = [ $(action).properties ] ;
+ }
+
+ local action-class = [ modules.peek $(action) : __class__ ] ;
+ local cloned-action = [ class.new $(action-class)
+ [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
+
+ local cloned-targets ;
+ for local target in [ $(action).targets ]
+ {
+ local n = [ $(target).name ] ;
+ # Do not modify produced target names.
+ local cloned-target = [ class.new file-target $(n) exact :
+ [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
+ local d = [ $(target).dependencies ] ;
+ if $(d)
+ {
+ $(cloned-target).depends $(d) ;
+ }
+ $(cloned-target).root [ $(target).root ] ;
+ $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
+
+ cloned-targets += $(cloned-target) ;
+ }
+
+ return $(cloned-action) ;
+}
+
+
+class subvariant
+{
+ import sequence ;
+ import type ;
+
+ rule __init__ ( main-target # The instance of main-target class.
+ : property-set # Properties requested for this target.
+ : sources *
+ : build-properties # Actually used properties.
+ : sources-usage-requirements # Properties propagated from sources.
+ : created-targets * ) # Top-level created targets.
+ {
+ self.main-target = $(main-target) ;
+ self.properties = $(property-set) ;
+ self.sources = $(sources) ;
+ self.build-properties = $(build-properties) ;
+ self.sources-usage-requirements = $(sources-usage-requirements) ;
+ self.created-targets = $(created-targets) ;
+
+ # Pre-compose a list of other dependency graphs this one depends on.
+ local deps = [ $(build-properties).get <implicit-dependency> ] ;
+ for local d in $(deps)
+ {
+ self.other-dg += [ $(d:G=).creating-subvariant ] ;
+ }
+
+ self.other-dg = [ sequence.unique $(self.other-dg) ] ;
+ }
+
+ rule main-target ( )
+ {
+ return $(self.main-target) ;
+ }
+
+ rule created-targets ( )
+ {
+ return $(self.created-targets) ;
+ }
+
+ rule requested-properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ rule build-properties ( )
+ {
+ return $(self.build-properties) ;
+ }
+
+ rule sources-usage-requirements ( )
+ {
+ return $(self.sources-usage-requirements) ;
+ }
+
+ rule set-usage-requirements ( usage-requirements )
+ {
+ self.usage-requirements = $(usage-requirements) ;
+ }
+
+ rule usage-requirements ( )
+ {
+ return $(self.usage-requirements) ;
+ }
+
+ # Returns all targets referenced by this subvariant, either directly or
+ # indirectly, and either as sources, or as dependency properties. Targets
+ # referred to using the dependency property are returned as properties, not
+ # targets.
+ #
+ rule all-referenced-targets ( theset )
+ {
+ # Find directly referenced targets.
+ local deps = [ $(self.build-properties).dependency ] ;
+ local all-targets = $(self.sources) $(deps) ;
+
+ # Find other subvariants.
+ local r ;
+ for local t in $(all-targets)
+ {
+ if ! [ $(theset).contains $(t) ]
+ {
+ $(theset).add $(t) ;
+ r += [ $(t:G=).creating-subvariant ] ;
+ }
+ }
+ r = [ sequence.unique $(r) ] ;
+ for local s in $(r)
+ {
+ if $(s) != $(__name__)
+ {
+ $(s).all-referenced-targets $(theset) ;
+ }
+ }
+ }
+
+ # Returns the properties specifying implicit include paths to generated
+ # headers. This traverses all targets in this subvariant and subvariants
+ # referred to by <implicit-dependency> properties. For all targets of type
+ # 'target-type' (or for all targets, if 'target-type' is not specified), the
+ # result will contain <$(feature)>path-to-that-target.
+ #
+ rule implicit-includes ( feature : target-type ? )
+ {
+ local key = ii$(feature)-$(target-type:E="") ;
+ if ! $($(key))-is-not-empty
+ {
+ local target-paths = [ all-target-directories $(target-type) ] ;
+ target-paths = [ sequence.unique $(target-paths) ] ;
+ local result = $(target-paths:G=$(feature)) ;
+ if ! $(result)
+ {
+ result = "" ;
+ }
+ $(key) = $(result) ;
+ }
+ if $($(key)) = ""
+ {
+ return ;
+ }
+ else
+ {
+ return $($(key)) ;
+ }
+ }
+
+ rule all-target-directories ( target-type ? )
+ {
+ if ! $(self.target-directories)
+ {
+ compute-target-directories $(target-type) ;
+ }
+ return $(self.target-directories) ;
+ }
+
+ rule compute-target-directories ( target-type ? )
+ {
+ local result ;
+ for local t in $(self.created-targets)
+ {
+ # Skip targets of the wrong type.
+ if ! $(target-type) ||
+ [ type.is-derived [ $(t).type ] $(target-type) ]
+ {
+ result = [ sequence.merge $(result) : [ $(t).path ] ] ;
+ }
+ }
+ for local d in $(self.other-dg)
+ {
+ result += [ $(d).all-target-directories $(target-type) ] ;
+ }
+ self.target-directories = $(result) ;
+ }
+}
diff --git a/jam-files/boost-build/kernel/boost-build.jam b/jam-files/boost-build/kernel/boost-build.jam
new file mode 100644
index 000000000..377f6ec02
--- /dev/null
+++ b/jam-files/boost-build/kernel/boost-build.jam
@@ -0,0 +1,5 @@
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+boost-build . ;
diff --git a/jam-files/boost-build/kernel/bootstrap.jam b/jam-files/boost-build/kernel/bootstrap.jam
new file mode 100644
index 000000000..89048af92
--- /dev/null
+++ b/jam-files/boost-build/kernel/bootstrap.jam
@@ -0,0 +1,263 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005, 2006 Rene Rivera
+# Copyright 2003, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# First of all, check the jam version
+
+if $(JAM_VERSION:J="") < 030112
+{
+ ECHO "error: Boost.Jam version 3.1.12 or later required" ;
+ EXIT ;
+}
+
+local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
+
+for local r in $(required-rules)
+{
+ if ! $(r) in [ RULENAMES ]
+ {
+ ECHO "error: builtin rule '$(r)' is not present" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+local native =
+ regex transform 2
+ ;
+while $(native)
+{
+ if ! [ HAS_NATIVE_RULE $(native[1]) :
+ $(native[2]) :
+ $(native[3]) ]
+ {
+ ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
+ ECHO "error: or interface version of that rule is too low" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+ native = $(native[4-]) ;
+}
+
+# Check that the builtin .ENVIRON module is present. We don't have a
+# builtin to check that a module is present, so we assume that the PATH
+# environment variable is always set and verify that the .ENVIRON module
+# has a non-empty value for that variable.
+module .ENVIRON
+{
+ local p = $(PATH) $(Path) $(path) ;
+ if ! $(p)
+ {
+ ECHO "error: no builtin module .ENVIRON is found" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Check that @() functionality is present. Similarly to modules,
+# we don't have a way to test that directly. Instead we check that
+# $(TMPNAME) functionality is present, which was added at roughly
+# the same time (more precisely, it was added just before).
+{
+ if ! $(TMPNAME)
+ {
+ ECHO "error: no @() functionality found" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Make sure that the \n escape is available.
+if "\n" = "n"
+{
+ if $(OS) = CYGWIN
+ {
+ ECHO "warning: escape sequences are not supported" ;
+ ECHO "warning: this will cause major misbehaviour on cygwin" ;
+ ECHO "warning: your version of bjam is likely out of date" ;
+ ECHO "warning: please get a fresh version from SVN." ;
+ }
+}
+
+# Bootstrap the module system. Then bring the import rule into the global module.
+#
+SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
+module modules { include <module@>modules.jam ; }
+IMPORT modules : import : : import ;
+
+{
+ # Add module subdirectories to the BOOST_BUILD_PATH, which allows
+ # us to make an incremental refactoring step by moving modules to
+ # the appropriate subdirectories, thereby achieving some physical
+ # separation of different layers without changing all of our code
+ # to specify subdirectories in import statements or use an extra
+ # level of qualification on imported names.
+
+ local subdirs =
+ kernel # only the most-intrinsic modules: modules, errors
+ util # low-level substrate: string/number handling, etc.
+ build # essential elements of the build system architecture
+ tools # toolsets for handling specific build jobs and targets.
+ contrib # user contributed (unreviewed) modules
+ . # build-system.jam lives here
+ ;
+ local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
+ BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
+
+ modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+
+ modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
+}
+
+# Reload the modules, to clean up things. The modules module can tolerate
+# being included twice.
+#
+import modules ;
+
+# Process option plugins first to allow them to prevent loading
+# the rest of the build system.
+#
+import option ;
+local dont-build = [ option.process ] ;
+
+# Should we skip building, i.e. loading the build system, according
+# to the options processed?
+#
+if ! $(dont-build)
+{
+ if ! --python in $(ARGV)
+ {
+ # Allow users to override the build system file from the
+ # command-line (mostly for testing)
+ local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
+ build-system ?= build-system ;
+
+ # Use last element in case of multiple command-line options
+ import $(build-system[-1]) ;
+ }
+ else
+ {
+ ECHO "Boost.Build V2 Python port (experimental)" ;
+
+ # Define additional interface that is exposed to Python code. Python code will
+ # also have access to select bjam builtins in the 'bjam' module, but some
+ # things are easier to define outside C.
+ module python_interface
+ {
+ rule load ( module-name : location )
+ {
+ USER_MODULE $(module-name) ;
+ # Make all rules in the loaded module available in
+ # the global namespace, so that we don't have
+ # to bother specifying "right" module when calling
+ # from Python.
+ module $(module-name)
+ {
+ __name__ = $(1) ;
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+ }
+
+ rule peek ( module-name ? : variables + )
+ {
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+ }
+
+ rule set-variable ( module-name : name : value * )
+ {
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+ }
+
+ rule set-top-level-targets ( targets * )
+ {
+ DEPENDS all : $(targets) ;
+ }
+
+ rule call-in-module ( m : rulename : * )
+ {
+ module $(m)
+ {
+ return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ }
+
+
+ rule set-update-action ( action : targets * : sources * : properties * )
+ {
+ $(action) $(targets) : $(sources) : $(properties) ;
+ }
+
+ rule set-update-action-in-module ( m : action : targets * : sources * : properties * )
+ {
+ module $(m)
+ {
+ $(2) $(3) : $(4) : $(5) ;
+ }
+ }
+
+ rule set-target-variable ( targets + : variable : value * : append ? )
+ {
+ if $(append)
+ {
+ $(variable) on $(targets) += $(value) ;
+ }
+ else
+ {
+ $(variable) on $(targets) = $(value) ;
+ }
+ }
+
+ rule get-target-variable ( targets + : variable )
+ {
+ return [ on $(targets) return $($(variable)) ] ;
+ }
+
+ rule import-rules-from-parent ( parent-module : this-module : user-rules * )
+ {
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+
+ rule mark-included ( targets * : includes * ) {
+ NOCARE $(includes) ;
+ INCLUDES $(targets) : $(includes) ;
+ ISFILE $(includes) ;
+ }
+ }
+
+ PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
+ modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
+
+ module PyBB
+ {
+ local ok = [ bootstrap $(root) ] ;
+ if ! $(ok)
+ {
+ EXIT ;
+ }
+ }
+
+
+ #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
+
+ #module PyBB
+ #{
+ # main ;
+ #}
+
+ }
+}
diff --git a/jam-files/boost-build/kernel/class.jam b/jam-files/boost-build/kernel/class.jam
new file mode 100644
index 000000000..b8e55af35
--- /dev/null
+++ b/jam-files/boost-build/kernel/class.jam
@@ -0,0 +1,420 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Polymorphic class system built on top of core Jam facilities.
+#
+# Classes are defined by 'class' keywords::
+#
+# class myclass
+# {
+# rule __init__ ( arg1 ) # constructor
+# {
+# self.attribute = $(arg1) ;
+# }
+#
+# rule method1 ( ) # method
+# {
+# return [ method2 ] ;
+# }
+#
+# rule method2 ( ) # method
+# {
+# return $(self.attribute) ;
+# }
+# }
+#
+# The __init__ rule is the constructor, and sets member variables.
+#
+# New instances are created by invoking [ new <class> <args...> ]:
+#
+# local x = [ new myclass foo ] ; # x is a new myclass object
+# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
+#
+# Derived classes are created by mentioning base classes in the declaration::
+#
+# class derived : myclass
+# {
+# rule __init__ ( arg )
+# {
+# myclass.__init__ $(arg) ; # call base __init__
+#
+# }
+#
+# rule method2 ( ) # method override
+# {
+# return $(self.attribute)XXX ;
+# }
+# }
+#
+# All methods operate virtually, replacing behavior in the base classes. For
+# example::
+#
+# local y = [ new derived foo ] ; # y is a new derived object
+# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "fooXXX"
+#
+# Each class instance is its own core Jam module. All instance attributes and
+# methods are accessible without additional qualification from within the class
+# instance. All rules imported in the class declaration, or visible in base classes,
+# are also visible. Base methods are available in qualified form:
+# base-name.method-name. By convention, attribute names are prefixed with
+# "self.".
+
+import modules ;
+import numbers ;
+
+
+rule xinit ( instance : class )
+{
+ module $(instance)
+ {
+ __class__ = $(2) ;
+ __name__ = $(1) ;
+ }
+}
+
+
+rule new ( class args * : * )
+{
+ .next-instance ?= 1 ;
+ local id = object($(class))@$(.next-instance) ;
+
+ xinit $(id) : $(class) ;
+
+ INSTANCE $(id) : class@$(class) ;
+ IMPORT_MODULE $(id) ;
+ $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+
+ # Bump the next unique object name.
+ .next-instance = [ numbers.increment $(.next-instance) ] ;
+
+ # Return the name of the new instance.
+ return $(id) ;
+}
+
+
+rule bases ( class )
+{
+ module class@$(class)
+ {
+ return $(__bases__) ;
+ }
+}
+
+
+rule is-derived ( class : bases + )
+{
+ local stack = $(class) ;
+ local visited found ;
+ while ! $(found) && $(stack)
+ {
+ local top = $(stack[1]) ;
+ stack = $(stack[2-]) ;
+ if ! ( $(top) in $(visited) )
+ {
+ visited += $(top) ;
+ stack += [ bases $(top) ] ;
+
+ if $(bases) in $(visited)
+ {
+ found = true ;
+ }
+ }
+ }
+ return $(found) ;
+}
+
+
+# Returns true if the 'value' is a class instance.
+#
+rule is-instance ( value )
+{
+ return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
+}
+
+
+# Check if the given value is of the given type.
+#
+rule is-a (
+ instance # The value to check.
+ : type # The type to test for.
+)
+{
+ if [ is-instance $(instance) ]
+ {
+ return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
+ }
+}
+
+
+local rule typecheck ( x )
+{
+ local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
+ if ! [ is-a $(x) : $(class-name) ]
+ {
+ return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+
+ # This will be the construction function for a class called 'myclass'.
+ #
+ class myclass
+ {
+ import assert ;
+
+ rule __init__ ( x_ * : y_ * )
+ {
+ # Set some instance variables.
+ x = $(x_) ;
+ y = $(y_) ;
+ foo += 10 ;
+ }
+
+ rule set-x ( newx * )
+ {
+ x = $(newx) ;
+ }
+
+ rule get-x ( )
+ {
+ return $(x) ;
+ }
+
+ rule set-y ( newy * )
+ {
+ y = $(newy) ;
+ }
+
+ rule get-y ( )
+ {
+ return $(y) ;
+ }
+
+ rule f ( )
+ {
+ return [ g $(x) ] ;
+ }
+
+ rule g ( args * )
+ {
+ if $(x) in $(y)
+ {
+ return $(x) ;
+ }
+ else if $(y) in $(x)
+ {
+ return $(y) ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ rule get-class ( )
+ {
+ return $(__class__) ;
+ }
+
+ rule get-instance ( )
+ {
+ return $(__name__) ;
+ }
+
+ rule invariant ( )
+ {
+ assert.equal 1 : 1 ;
+ }
+
+ rule get-foo ( )
+ {
+ return $(foo) ;
+ }
+ }
+# class myclass ;
+
+ class derived1 : myclass
+ {
+ rule __init__ ( z_ )
+ {
+ myclass.__init__ $(z_) : X ;
+ z = $(z_) ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived1.g ;
+ }
+
+ rule h ( )
+ {
+ return derived1.h ;
+ }
+
+ rule get-z ( )
+ {
+ return $(z) ;
+ }
+
+ # Check that 'assert.equal' visible in base class is visible here.
+ #
+ rule invariant2 ( )
+ {
+ assert.equal 2 : 2 ;
+ }
+
+ # Check that 'assert.variable-not-empty' visible in base class is
+ # visible here.
+ #
+ rule invariant3 ( )
+ {
+ local v = 10 ;
+ assert.variable-not-empty v ;
+ }
+ }
+# class derived1 : myclass ;
+
+ class derived2 : myclass
+ {
+ rule __init__ ( )
+ {
+ myclass.__init__ 1 : 2 ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived2.g ;
+ }
+
+ # Test the ability to call base class functions with qualification.
+ #
+ rule get-x ( )
+ {
+ return [ myclass.get-x ] ;
+ }
+ }
+# class derived2 : myclass ;
+
+ class derived2a : derived2
+ {
+ rule __init__
+ {
+ derived2.__init__ ;
+ }
+ }
+# class derived2a : derived2 ;
+
+ local rule expect_derived2 ( [derived2] x ) { }
+
+ local a = [ new myclass 3 4 5 : 4 5 ] ;
+ local b = [ new derived1 4 ] ;
+ local b2 = [ new derived1 4 ] ;
+ local c = [ new derived2 ] ;
+ local d = [ new derived2 ] ;
+ local e = [ new derived2a ] ;
+
+ expect_derived2 $(d) ;
+ expect_derived2 $(e) ;
+
+ # Argument checking is set up to call exit(1) directly on failure, and we
+ # cannot hijack that with try, so we had better not do this test by
+ # default. We could fix this by having errors look up and invoke the EXIT
+ # rule instead; EXIT can be hijacked (;-)
+ if --fail-typecheck in [ modules.peek : ARGV ]
+ {
+ try ;
+ {
+ expect_derived2 $(a) ;
+ }
+ catch
+ "Expected an instance of derived2 but got" instead
+ ;
+ }
+
+ #try ;
+ #{
+ # new bad_subclass ;
+ #}
+ #catch
+ # bad_subclass.bad_subclass failed to call base class constructor myclass.__init__
+ # ;
+
+ #try ;
+ #{
+ # class bad_subclass ;
+ #}
+ #catch bad_subclass has already been declared ;
+
+ assert.result 3 4 5 : $(a).get-x ;
+ assert.result 4 5 : $(a).get-y ;
+ assert.result 4 : $(b).get-x ;
+ assert.result X : $(b).get-y ;
+ assert.result 4 : $(b).get-z ;
+ assert.result 1 : $(c).get-x ;
+ assert.result 2 : $(c).get-y ;
+ assert.result 4 5 : $(a).f ;
+ assert.result derived1.g : $(b).f ;
+ assert.result derived2.g : $(c).f ;
+ assert.result derived2.g : $(d).f ;
+
+ assert.result 10 : $(b).get-foo ;
+
+ $(a).invariant ;
+ $(b).invariant2 ;
+ $(b).invariant3 ;
+
+ # Check that the __class__ attribute is getting properly set.
+ assert.result myclass : $(a).get-class ;
+ assert.result derived1 : $(b).get-class ;
+ assert.result $(a) : $(a).get-instance ;
+
+ $(a).set-x a.x ;
+ $(b).set-x b.x ;
+ $(c).set-x c.x ;
+ $(d).set-x d.x ;
+ assert.result a.x : $(a).get-x ;
+ assert.result b.x : $(b).get-x ;
+ assert.result c.x : $(c).get-x ;
+ assert.result d.x : $(d).get-x ;
+
+ class derived3 : derived1 derived2
+ {
+ rule __init__ ( )
+ {
+ }
+ }
+
+ assert.result : bases myclass ;
+ assert.result myclass : bases derived1 ;
+ assert.result myclass : bases derived2 ;
+ assert.result derived1 derived2 : bases derived3 ;
+
+ assert.true is-derived derived1 : myclass ;
+ assert.true is-derived derived2 : myclass ;
+ assert.true is-derived derived3 : derived1 ;
+ assert.true is-derived derived3 : derived2 ;
+ assert.true is-derived derived3 : derived1 derived2 myclass ;
+ assert.true is-derived derived3 : myclass ;
+
+ assert.false is-derived myclass : derived1 ;
+
+ assert.true is-instance $(a) ;
+ assert.false is-instance bar ;
+
+ assert.true is-a $(a) : myclass ;
+ assert.true is-a $(c) : derived2 ;
+ assert.true is-a $(d) : myclass ;
+ assert.false is-a literal : myclass ;
+}
diff --git a/jam-files/boost-build/kernel/errors.jam b/jam-files/boost-build/kernel/errors.jam
new file mode 100644
index 000000000..63b11e867
--- /dev/null
+++ b/jam-files/boost-build/kernel/errors.jam
@@ -0,0 +1,274 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Print a stack backtrace leading to this rule's caller. Each argument
+# represents a line of output to be printed after the first line of the
+# backtrace.
+#
+rule backtrace ( skip-frames prefix messages * : * )
+{
+ local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
+ local drop-elements = $(frame-skips[$(skip-frames)]) ;
+ if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
+ {
+ ECHO "warning: backtrace doesn't support skipping $(skip-frames) frames;"
+ "using 1 instead." ;
+ drop-elements = 5 ;
+ }
+
+ local args = $(.args) ;
+ if $(.user-modules-only)
+ {
+ local bt = [ nearest-user-location ] ;
+ ECHO "$(prefix) at $(bt) " ;
+ for local n in $(args)
+ {
+ if $($(n))-is-not-empty
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ }
+ else
+ {
+ # Get the whole backtrace, then drop the initial quadruples
+ # corresponding to the frames that must be skipped.
+ local bt = [ BACKTRACE ] ;
+ bt = $(bt[$(drop-elements)-]) ;
+
+ while $(bt)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
+
+ # The first time through, print each argument on a separate line.
+ for local n in $(args)
+ {
+ if $($(n))-is-not-empty
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ args = ; # Kill args so that this never happens again.
+
+ # Move on to the next quadruple.
+ bt = $(bt[5-]) ;
+ }
+ }
+}
+
+.args ?= messages 2 3 4 5 6 7 8 9 ;
+.disabled ?= ;
+.last-error-$(.args) ?= ;
+
+
+# try-catch --
+#
+# This is not really an exception-handling mechanism, but it does allow us to
+# perform some error-checking on our error-checking. Errors are suppressed after
+# a try, and the first one is recorded. Use catch to check that the error
+# message matched expectations.
+
+# Begin looking for error messages.
+#
+rule try ( )
+{
+ .disabled += true ;
+ .last-error-$(.args) = ;
+}
+
+
+# Stop looking for error messages; generate an error if an argument of messages
+# is not found in the corresponding argument in the error call.
+#
+rule catch ( messages * : * )
+{
+ .disabled = $(.disabled[2-]) ; # Pop the stack.
+
+ import sequence ;
+
+ if ! $(.last-error-$(.args))-is-not-empty
+ {
+ error-skip-frames 3 expected an error, but none occurred ;
+ }
+ else
+ {
+ for local n in $(.args)
+ {
+ if ! $($(n)) in $(.last-error-$(n))
+ {
+ local v = [ sequence.join $($(n)) : " " ] ;
+ v ?= "" ;
+ local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
+
+ .last-error-$(.args) = ;
+ error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
+ : got \"$(joined)\" instead ;
+ }
+ }
+ }
+}
+
+
+rule error-skip-frames ( skip-frames messages * : * )
+{
+ if ! $(.disabled)
+ {
+ backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ EXIT ;
+ }
+ else if ! $(.last-error-$(.args))
+ {
+ for local n in $(.args)
+ {
+ # Add an extra empty string so that we always have
+ # something in the event of an error
+ .last-error-$(n) = $($(n)) "" ;
+ }
+ }
+}
+
+if --no-error-backtrace in [ modules.peek : ARGV ]
+{
+ .no-error-backtrace = true ;
+}
+
+
+# Print an error message with a stack backtrace and exit.
+#
+rule error ( messages * : * )
+{
+ if $(.no-error-backtrace)
+ {
+ # Print each argument on a separate line.
+ for local n in $(.args)
+ {
+ if $($(n))-is-not-empty
+ {
+ if ! $(first-printed)
+ {
+ ECHO error: $($(n)) ;
+ first-printed = true ;
+ }
+ else
+ {
+ ECHO $($(n)) ;
+ }
+ }
+ }
+ EXIT ;
+ }
+ else
+ {
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
+
+
+# Same as 'error', but the generated backtrace will include only user files.
+#
+rule user-error ( messages * : * )
+{
+ .user-modules-only = 1 ;
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+}
+
+
+# Print a warning message with a stack backtrace and exit.
+#
+rule warning
+{
+ backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+}
+
+
+# Convert an arbitrary argument list into a list with ":" separators and quoted
+# elements representing the same information. This is mostly useful for
+# formatting descriptions of arguments with which a rule was called when
+# reporting an error.
+#
+rule lol->list ( * )
+{
+ local result ;
+ local remaining = 1 2 3 4 5 6 7 8 9 ;
+ while $($(remaining))
+ {
+ local n = $(remaining[1]) ;
+ remaining = $(remaining[2-]) ;
+
+ if $(n) != 1
+ {
+ result += ":" ;
+ }
+ result += \"$($(n))\" ;
+ }
+ return $(result) ;
+}
+
+
+# Returns the file:line for the nearest entry in the backtrace which
+# corresponds to a user module.
+#
+rule nearest-user-location ( )
+{
+ local bt = [ BACKTRACE ] ;
+
+ local result ;
+ while $(bt) && ! $(result)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam) ;
+
+ if [ MATCH $(user-modules) : $(bt[1]:D=) ]
+ {
+ result = $(bt[1]):$(bt[2]) ;
+ }
+ bt = $(bt[5-]) ;
+ }
+ return $(result) ;
+}
+
+
+# If optimized rule is available in Jam, use it.
+if NEAREST_USER_LOCATION in [ RULENAMES ]
+{
+ rule nearest-user-location ( )
+ {
+ local r = [ NEAREST_USER_LOCATION ] ;
+ return $(r[1]):$(r[2]) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Show that we can correctly catch an expected error.
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : somewhere ;
+
+ # Show that unexpected errors generate real errors.
+ try ;
+ {
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : nowhere ;
+ }
+ catch expected \"nowhere\" in argument 2 ;
+
+ # Show that not catching an error where one was expected is an error.
+ try ;
+ {
+ try ;
+ {
+ }
+ catch ;
+ }
+ catch expected an error, but none occurred ;
+}
diff --git a/jam-files/boost-build/kernel/modules.jam b/jam-files/boost-build/kernel/modules.jam
new file mode 100644
index 000000000..1f75354fc
--- /dev/null
+++ b/jam-files/boost-build/kernel/modules.jam
@@ -0,0 +1,354 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Essentially an include guard; ensures that no module is loaded multiple times.
+.loaded ?= ;
+
+# A list of modules currently being loaded for error reporting of circular
+# dependencies.
+.loading ?= ;
+
+# A list of modules needing to be tested using their __test__ rule.
+.untested ?= ;
+
+# A list of modules which have been tested using their __test__ rule.
+.tested ?= ;
+
+
+# Runs internal Boost Build unit tests for the specified module. The module's
+# __test__ rule is executed in its own module to eliminate any inadvertent
+# effects of testing module dependencies (such as assert) on the module itself.
+#
+local rule run-module-test ( m )
+{
+ local tested-modules = [ modules.peek modules : .tested ] ;
+
+ if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
+ && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
+ {
+ modules.poke modules : .tested : $(tested-modules) $(m) ;
+
+ if ! ( __test__ in [ RULENAMES $(m) ] )
+ {
+ local argv = [ peek : ARGV ] ;
+ if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
+ {
+ ECHO warning: no __test__ rule defined in module $(m) ;
+ }
+ }
+ else
+ {
+ if ! ( --quiet in $(argv) )
+ {
+ ECHO testing module $(m)... ;
+ }
+
+ local test-module = __test-$(m)__ ;
+ IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) ] ;
+ IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
+ module $(test-module)
+ {
+ __test__ ;
+ }
+ }
+ }
+}
+
+
+# Return the binding of the given module.
+#
+rule binding ( module )
+{
+ return $($(module).__binding__) ;
+}
+
+
+# Sets the module-local value of a variable. This is the most reliable way to
+# set a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule poke ( module-name ? : variables + : value * )
+{
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+}
+
+
+# Returns the module-local value of a variable. This is the most reliable way to
+# examine a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule peek ( module-name ? : variables + )
+{
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+}
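+
+
+# Illustrative sketch (hypothetical module and variable names):
+#
+#   poke mymodule : MY-VAR : a b c ;
+#   local v = [ peek mymodule : MY-VAR ] ;   # -> a b c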
+
+
+# Call the given rule locally in the given module. Use this for rules accepting
+# rule names as arguments, so that the passed rule may be invoked in the context
+# of the rule's caller (for example, if the rule accesses module globals or is a
+# local rule). Note that rules called this way may accept at most 8 parameters.
+#
+rule call-in ( module-name ? : rule-name args * : * )
+{
+ module $(module-name)
+ {
+ return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+}
+
+
+# Given a possibly qualified rule name and arguments, remove any initial module
+# qualification from the rule and invoke it in that module. If there is no
+# module qualification, the rule is invoked in the global module. Note that
+# rules called this way may accept at most 8 parameters.
+#
+rule call-locally ( qualified-rule-name args * : * )
+{
+ local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
+ local rule-name = $(module-rule[2]) ;
+ rule-name ?= $(qualified-rule-name) ;
+ # We pass only 8 parameters here since Boost Jam allows at most 9 rule
+ # parameter positions and the call-in rule already uses up the initial
+ # position for the module name.
+ return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
+ $(4) : $(5) : $(6) : $(7) : $(8) ] ;
+}
+
+
+# Load the indicated module if it is not already loaded.
+#
+rule load (
+ module-name # Name of module to load. Rules will be defined in this
+ # module.
+ : filename ? # (partial) path to file; Defaults to $(module-name).jam.
+ : search * # Directories in which to search for filename. Defaults to
+ # $(BOOST_BUILD_PATH).
+)
+{
+ # Avoid loading modules twice.
+ if ! ( $(module-name) in $(.loaded) )
+ {
+ filename ?= $(module-name).jam ;
+
+ # Mark the module loaded so we do not try to load it recursively.
+ .loaded += $(module-name) ;
+
+ # Suppress tests if any module loads are already in progress.
+ local suppress-test = $(.loading[1]) ;
+
+ # Push this module on the loading stack.
+ .loading += $(module-name) ;
+
+ # Remember that it is untested.
+ .untested += $(module-name) ;
+
+ # Insert the new module's __name__ and __file__ globals.
+ poke $(module-name) : __name__ : $(module-name) ;
+ poke $(module-name) : __file__ : $(filename) ;
+
+ module $(module-name)
+ {
+ # Add some grist so that the module will have a unique target name.
+ local module-target = $(__file__:G=module@) ;
+
+ local search = $(3) ;
+ search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
+ SEARCH on $(module-target) = $(search) ;
+ BINDRULE on $(module-target) = modules.record-binding ;
+
+ include $(module-target) ;
+
+ # Allow the module to see its own names with full qualification.
+ local rules = [ RULENAMES $(__name__) ] ;
+ IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
+ }
+
+ if $(module-name) != modules && ! [ binding $(module-name) ]
+ {
+ import errors ;
+ errors.error "Could not find module" $(module-name) in $(search) ;
+ }
+
+ # Pop the loading stack. Must happen before testing or we will run into
+ # a circular loading dependency.
+ .loading = $(.loading[1--2]) ;
+
+ # Run any pending tests if this is an outer load.
+ if ! $(suppress-test)
+ {
+ local argv = [ peek : ARGV ] ;
+ for local m in $(.untested)
+ {
+ run-module-test $(m) ;
+ }
+ .untested = ;
+ }
+ }
+ else if $(module-name) in $(.loading)
+ {
+ import errors ;
+ errors.error loading \"$(module-name)\"
+ : circular module loading dependency:
+ : $(.loading)" ->" $(module-name) ;
+ }
+}
+
+
+# This helper is used by load (above) to record the binding (path) of each
+# loaded module.
+#
+rule record-binding ( module-target : binding )
+{
+ $(.loading[-1]).__binding__ = $(binding) ;
+}
+
+
+# Transform each path in the list, with all backslashes converted to forward
+# slashes and all detectable redundancy removed. Something like this is probably
+# needed in path.jam, but I am not sure of that, I do not understand it, and I
+# am not ready to move all of path.jam into the kernel.
+#
+local rule normalize-raw-paths ( paths * )
+{
+ local result ;
+ for p in $(paths:T)
+ {
+ result += [ NORMALIZE_PATH $(p) ] ;
+ }
+ return $(result) ;
+}
+
+
+.cwd = [ PWD ] ;
+
+
+# Load the indicated module and import rule names into the current module. Any
+# members of rules-opt will be available without qualification in the caller's
+# module. Any members of rename-opt will be taken as the names of the rules in
+# the caller's module, in place of the names they have in the imported module.
+# If rules-opt = '*', all rules from the indicated module are imported into the
+# caller's module. If rename-opt is supplied, it must have the same number of
+# elements as rules-opt.
+#
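+# Illustrative usage (module and rule names are only examples):
+#
+#   import path ;                            # qualified use: path.join ...
+#   import sequence : unique ;               # unqualified use: unique ...
+#   import assert : result : check-result ;  # renamed use: check-result ...
+#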
+rule import ( module-names + : rules-opt * : rename-opt * )
+{
+ if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
+ {
+ import errors ;
+ errors.error "Rule aliasing is only available for explicit imports." ;
+ }
+
+ if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
+ {
+ import errors ;
+ errors.error "When loading multiple modules, no specific rules or"
+ "renaming is allowed" ;
+ }
+
+ local caller = [ CALLER_MODULE ] ;
+
+ # Import each specified module
+ for local m in $(module-names)
+ {
+ if ! $(m) in $(.loaded)
+ {
+ # If the importing module isn't already in the BOOST_BUILD_PATH,
+ # prepend it to the path. We don't want to invert the search order
+ # of modules that are already there.
+
+ local caller-location ;
+ if $(caller)
+ {
+ caller-location = [ binding $(caller) ] ;
+ caller-location = $(caller-location:D) ;
+ caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ;
+ }
+
+ local search = [ peek : BOOST_BUILD_PATH ] ;
+ search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
+
+ if $(caller-location) && ! $(caller-location) in $(search)
+ {
+ search = $(caller-location) $(search) ;
+ }
+
+ load $(m) : : $(search) ;
+ }
+
+ IMPORT_MODULE $(m) : $(caller) ;
+
+ if $(rules-opt)
+ {
+ local source-names ;
+ if $(rules-opt) = *
+ {
+ local all-rules = [ RULENAMES $(m) ] ;
+ source-names = $(all-rules) ;
+ }
+ else
+ {
+ source-names = $(rules-opt) ;
+ }
+ local target-names = $(rename-opt) ;
+ target-names ?= $(source-names) ;
+ IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ;
+ }
+ }
+}
+
+
+# Define exported copies in $(target-module) of all rules exported from
+# $(source-module). Also make them available in the global module with
+# qualification, so that it is just as though the rules were defined originally
+# in $(target-module).
+#
+rule clone-rules ( source-module target-module )
+{
+ local rules = [ RULENAMES $(source-module) ] ;
+
+ IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ;
+ EXPORT $(target-module) : $(rules) ;
+ IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ;
+}
+
+
+# These rules need to be available in all modules to implement module loading
+# itself and other fundamental operations.
+local globalize = peek poke record-binding ;
+IMPORT modules : $(globalize) : : modules.$(globalize) ;
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import modules : normalize-raw-paths ;
+
+ module modules.__test__
+ {
+ foo = bar ;
+ }
+
+ assert.result bar : peek modules.__test__ : foo ;
+
+ poke modules.__test__ : foo : bar baz ;
+ assert.result bar baz : peek modules.__test__ : foo ;
+
+ assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
+ assert.result . : normalize-raw-paths . ;
+ assert.result .. : normalize-raw-paths .. ;
+ assert.result ../.. : normalize-raw-paths ../.. ;
+ assert.result .. : normalize-raw-paths ./.. ;
+ assert.result / / : normalize-raw-paths / \\ ;
+ assert.result a : normalize-raw-paths a ;
+ assert.result a : normalize-raw-paths a/ ;
+ assert.result /a : normalize-raw-paths /a/ ;
+ assert.result / : normalize-raw-paths /a/.. ;
+}
diff --git a/jam-files/boost-build/options/help.jam b/jam-files/boost-build/options/help.jam
new file mode 100644
index 000000000..b507e1edd
--- /dev/null
+++ b/jam-files/boost-build/options/help.jam
@@ -0,0 +1,212 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Rene Rivera
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module is the plug-in handler for the --help and --help-.*
+# command-line options
+import modules ;
+import assert ;
+import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
+import sequence ;
+import set ;
+import project ;
+import print ;
+import os ;
+import version ;
+import path ;
+
+# List of names that look like modules, but really are not.
+#
+.not-modules =
+ boost-build bootstrap site-config test user-config
+ -tools allyourbase boost-base features python stlport testing unit-tests ;
+
+# The help system options are parsed here and handed off to the doc
+# module to translate into documentation requests and actions. The
+# understood options are:
+#
+# --help-disable-<option>
+# --help-doc-options
+# --help-enable-<option>
+# --help-internal
+# --help-options
+# --help-usage
+# --help-output <type>
+# --help-output-file <file>
+# --help [<module-or-class>]
+#
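+# Example invocations handled here (illustrative):
+#
+#   bjam --help
+#   bjam --help generators
+#   bjam --help-options
+#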
+rule process (
+ command # The option.
+ : values * # The values, starting after the "=".
+ )
+{
+ assert.result --help : MATCH ^(--help).* : $(command) ;
+ local did-help = ;
+ switch $(command)
+ {
+ case --help-internal :
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules ?= . ;
+ local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
+ local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
+ local modules-to-list =
+ [ sequence.insertion-sort
+ [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
+ local modules-to-scan ;
+ for local m in $(modules-to-list)
+ {
+ local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
+ modules-to-scan += $(module-files[1]) ;
+ }
+ do-scan $(modules-to-scan) : print-help-all ;
+ did-help = true ;
+
+ case --help-enable-* :
+ local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) : enabled ;
+ did-help = true ;
+
+ case --help-disable-* :
+ local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) ;
+ did-help = true ;
+
+ case --help-output :
+ set-output $(values[1]) ;
+ did-help = true ;
+
+ case --help-output-file :
+ set-output-file $(values[1]) ;
+ did-help = true ;
+
+ case --help-doc-options :
+ local doc-module-spec = [ split-symbol doc ] ;
+ do-scan $(doc-module-spec[1]) : print-help-options ;
+ did-help = true ;
+
+ case --help-options :
+ print-help-usage ;
+ did-help = true ;
+
+ case --help :
+ local spec = $(values[1]) ;
+ if $(spec)
+ {
+ local spec-parts = [ split-symbol $(spec) ] ;
+ if $(spec-parts)
+ {
+ if $(spec-parts[2])
+ {
+ do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
+ }
+ else
+ {
+ do-scan $(spec-parts[1]) : print-help-module ;
+ }
+ }
+ else
+ {
+ EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
+ }
+ }
+ else
+ {
+ version.print ;
+ ECHO ;
+ # First print documentation from the current Jamfile, if any.
+ # FIXME: Generally, this duplication of project.jam logic is bad.
+ local names = [ modules.peek project : JAMROOT ]
+ [ modules.peek project : JAMFILE ] ;
+ local project-file = [ path.glob . : $(names) ] ;
+ if ! $(project-file)
+ {
+ project-file = [ path.glob-in-parents . : $(names) ] ;
+ }
+
+ for local p in $(project-file)
+ {
+ do-scan $(p) : print-help-project $(p) ;
+ }
+
+ # Next any user-config help.
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local user-config = [ GLOB $(user-path) : user-config.jam ] ;
+ if $(user-config)
+ {
+ do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
+ }
+
+ # Next any site-config help.
+ local site-config = [ GLOB $(user-path) : site-config.jam ] ;
+ if $(site-config)
+ {
+ do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
+ }
+
+ # Then the overall help.
+ print-help-top ;
+ }
+ did-help = true ;
+ }
+ if $(did-help)
+ {
+ UPDATE all ;
+ NOCARE all ;
+ }
+ return $(did-help) ;
+}
+
+# Split a reference to a symbol into module and symbol parts.
+#
+local rule split-symbol (
+ symbol # The symbol to split.
+ )
+{
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules ?= . ;
+ local module-name = $(symbol) ;
+ local symbol-name = ;
+ local result = ;
+ while ! $(result)
+ {
+ local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
+ if $(module-path)
+ {
+ # The 'module-name' in fact refers to a module. Return the full
+ # module path and a symbol within it. If the 'symbol' passed to this
+ # rule is already a module, 'symbol-name' will be empty. Otherwise,
+ # it's initialized on the previous loop iteration.
+ # In case there are several modules by this name,
+ # use the first one.
+ result = $(module-path[1]) $(symbol-name) ;
+ }
+ else
+ {
+ if ! $(module-name:S)
+ {
+ result = - ;
+ }
+ else
+ {
+ local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
+ if $(symbol-name)
+ {
+ symbol-name = $(next-symbol-part).$(symbol-name) ;
+ }
+ else
+ {
+ symbol-name = $(next-symbol-part) ;
+ }
+ module-name = $(module-name:B) ;
+ }
+ }
+ }
+ if $(result) != -
+ {
+ return $(result) ;
+ }
+}
diff --git a/jam-files/boost-build/site-config.jam b/jam-files/boost-build/site-config.jam
new file mode 100644
index 000000000..ad22d6744
--- /dev/null
+++ b/jam-files/boost-build/site-config.jam
@@ -0,0 +1,4 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
diff --git a/jam-files/boost-build/tools/acc.jam b/jam-files/boost-build/tools/acc.jam
new file mode 100644
index 000000000..f04c9dc87
--- /dev/null
+++ b/jam-files/boost-build/tools/acc.jam
@@ -0,0 +1,118 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Copyright Boris Gubenko 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Boost.Build V2 toolset for the HP aC++ compiler.
+#
+
+import toolset : flags ;
+import feature ;
+import generators ;
+import common ;
+
+feature.extend toolset : acc ;
+toolset.inherit acc : unix ;
+generators.override builtin.lib-generator : acc.prebuilt ;
+generators.override acc.searched-lib-generator : searched-lib-generator ;
+
+# Configures the acc toolset.
+rule init ( version ? : user-provided-command * : options * )
+{
+ local condition = [ common.check-init-parameters acc
+ : version $(version) ] ;
+
+ local command = [ common.get-invocation-command acc : aCC
+ : $(user-provided-command) ] ;
+
+ common.handle-options acc : $(condition) : $(command) : $(options) ;
+}
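+
+# Configuration sketch, e.g. in user-config.jam (the explicit command path is
+# hypothetical and depends on the local HP aC++ installation):
+#
+#   using acc ;
+#   using acc : : /opt/aCC/bin/aCC ;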
+
+
+# Declare generators
+generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ;
+generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ;
+
+# Declare flags.
+flags acc CFLAGS <optimization>off : ;
+flags acc CFLAGS <optimization>speed : -O3 ;
+flags acc CFLAGS <optimization>space : -O2 ;
+
+flags acc CFLAGS <inlining>off : +d ;
+flags acc CFLAGS <inlining>on : ;
+flags acc CFLAGS <inlining>full : ;
+
+flags acc C++FLAGS <exception-handling>off : ;
+flags acc C++FLAGS <exception-handling>on : ;
+
+flags acc C++FLAGS <rtti>off : ;
+flags acc C++FLAGS <rtti>on : ;
+
+# We want the full path to the sources in the debug symbols because otherwise
+# the debugger won't find the sources when we use boost.build.
+flags acc CFLAGS <debug-symbols>on : -g ;
+flags acc LINKFLAGS <debug-symbols>on : -g ;
+flags acc LINKFLAGS <debug-symbols>off : -s ;
+
+# V2 does not have <shared-linkable>, not sure what this meant in V1.
+# flags acc CFLAGS <shared-linkable>true : +Z ;
+
+flags acc CFLAGS <profiling>on : -pg ;
+flags acc LINKFLAGS <profiling>on : -pg ;
+
+flags acc CFLAGS <address-model>64 : +DD64 ;
+flags acc LINKFLAGS <address-model>64 : +DD64 ;
+
+# It is unknown whether there is a separate option for an rpath used only
+# at link time, similar to -rpath-link in GNU ld. We'll use -L.
+flags acc RPATH_LINK : <xdll-path> ;
+
+flags acc CFLAGS <cflags> ;
+flags acc C++FLAGS <cxxflags> ;
+flags acc DEFINES <define> ;
+flags acc UNDEFS <undef> ;
+flags acc HDRS <include> ;
+flags acc STDHDRS <sysinclude> ;
+flags acc LINKFLAGS <linkflags> ;
+flags acc ARFLAGS <arflags> ;
+
+flags acc LIBPATH <library-path> ;
+flags acc NEEDLIBS <library-file> ;
+flags acc FINDLIBS <find-shared-library> ;
+flags acc FINDLIBS <find-static-library> ;
+
+# Add the multi-threading option according to the threading model.
+flags acc CFLAGS <threading>multi : -mt ;
+flags acc LINKFLAGS <threading>multi : -mt ;
+
+flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+actions acc.link bind NEEDLIBS
+{
+ $(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
+}
+
+SPACE = " " ;
+actions acc.link.dll bind NEEDLIBS
+{
+ $(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
+}
+
+actions acc.compile.c
+{
+ cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
+}
+
+actions acc.compile.c++
+{
+ $(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
+}
+
+actions updated together piecemeal acc.archive
+{
+ ar ru$(ARFLAGS:E="") "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/bison.jam b/jam-files/boost-build/tools/bison.jam
new file mode 100644
index 000000000..0689d4bd8
--- /dev/null
+++ b/jam-files/boost-build/tools/bison.jam
@@ -0,0 +1,32 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import generators ;
+import feature ;
+import type ;
+import property ;
+
+feature.feature bison.prefix : : free ;
+type.register Y : y ;
+type.register YY : yy ;
+generators.register-standard bison.bison : Y : C H ;
+generators.register-standard bison.bison : YY : CPP HPP ;
+
+rule init ( )
+{
+}
+
+rule bison ( dst dst_header : src : properties * )
+{
+ local r = [ property.select bison.prefix : $(properties) ] ;
+ if $(r)
+ {
+ PREFIX_OPT on $(<) = -p $(r:G=) ;
+ }
+}
+
+actions bison
+{
+ bison $(PREFIX_OPT) -d -o $(<[1]) $(>)
+}
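+
+# Usage sketch (hypothetical Jamfile, assuming this module has been loaded,
+# e.g. via "using bison ;"): listing a .y/.yy source is enough for these
+# generators to run bison and feed the generated parser to the compiler; the
+# bison.prefix feature maps to bison's -p option.
+#
+#   exe calc : calc.yy lexer.cpp : <bison.prefix>calc_ ;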
diff --git a/jam-files/boost-build/tools/boostbook-config.jam b/jam-files/boost-build/tools/boostbook-config.jam
new file mode 100644
index 000000000..6e3f3ddc1
--- /dev/null
+++ b/jam-files/boost-build/tools/boostbook-config.jam
@@ -0,0 +1,13 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for BoostBook tools. To use, just import this module.
+#
+# This module is deprecated. Calling
+#   using boostbook ;
+# with no arguments now suffices.
+
+import toolset : using ;
+
+using boostbook ;
diff --git a/jam-files/boost-build/tools/boostbook.jam b/jam-files/boost-build/tools/boostbook.jam
new file mode 100644
index 000000000..3a5964c62
--- /dev/null
+++ b/jam-files/boost-build/tools/boostbook.jam
@@ -0,0 +1,727 @@
+# Copyright 2003, 2004, 2005 Dave Abrahams
+# Copyright 2003, 2004, 2005 Douglas Gregor
+# Copyright 2005, 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of documentation
+# from BoostBook sources.
+#
+# The type of output is controlled by the <format> feature which can
+# have the following values::
+#
+# * html: Generates html documentation. This is the default.
+# * xhtml: Generates xhtml documentation
+# * htmlhelp: Generates html help output.
+# * onehtml: Generates a single html page.
+# * man: Generates man pages.
+# * pdf: Generates pdf documentation.
+# * ps: Generates postscript output.
+# * docbook: Generates docbook XML.
+# * fo: Generates XSL formatting objects.
+# * tests: Extracts test cases from the boostbook XML.
+#
+# 'format' is an implicit feature, so typing pdf on the command
+# line (for example) is a shortcut for format=pdf.
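+#
+# For example (hypothetical command lines, assuming the module has been
+# configured via "using boostbook ;"):
+#
+#   bjam pdf                    # equivalent to bjam format=pdf
+#   bjam format=html format=man # request several formats at once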
+
+import "class" : new ;
+import common ;
+import errors ;
+import targets ;
+import feature ;
+import generators ;
+import print ;
+import property ;
+import project ;
+import property-set ;
+import regex ;
+import scanner ;
+import sequence ;
+import make ;
+import os ;
+import type ;
+import modules path project ;
+import build-system ;
+
+import xsltproc : xslt xslt-dir ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project boostbook ;
+
+
+feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests
+ : incidental implicit composite propagated ;
+
+type.register DTDXML : dtdxml ;
+type.register XML : xml ;
+type.register BOOSTBOOK : boostbook : XML ;
+type.register DOCBOOK : docbook : XML ;
+type.register FO : fo : XML ;
+type.register PDF : pdf ;
+type.register PS : ps ;
+type.register XSLT : xsl : XML ;
+type.register HTMLDIR ;
+type.register XHTMLDIR ;
+type.register HTMLHELP ;
+type.register MANPAGES ;
+type.register TESTS : tests ;
+# Artificial target type, used to require invocation of top-level
+# BoostBook generator.
+type.register BOOSTBOOK_MAIN ;
+
+
+# Initialize BoostBook support.
+rule init (
+ docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not
+ # provided, we use DOCBOOK_XSL_DIR from the environment
+ # (if available) or look in standard locations.
+ # Otherwise, we let the XML processor load the
+ # stylesheets remotely.
+
+ : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
+ # DOCBOOK_DTD_DIR From the environment (if available) or
+ # look in standard locations. Otherwise, we let the XML
+ # processor load the DTD remotely.
+
+ : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
+)
+{
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ check-boostbook-dir $(boostbook-dir) ;
+ find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
+
+ # Register generators only if we were called via "using boostbook ;".
+ generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ;
+ generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ;
+ generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ;
+ generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
+ generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
+ generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
+ generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
+ generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
+ generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ;
+
+ # Do the same for the Jamfile main target rules.
+ IMPORT $(__name__) : boostbook : : boostbook ;
+ }
+ else
+ {
+ if $(docbook-xsl-dir)
+ {
+ modify-config ;
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
+ check-docbook-xsl-dir ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ modify-config ;
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
+ check-docbook-dtd-dir ;
+ }
+ if $(boostbook-dir)
+ {
+ modify-config ;
+ check-boostbook-dir $(boostbook-dir) ;
+ local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
+ local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+ }
+ }
+}
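+
+# Configuration sketch, e.g. in user-config.jam (the paths below are
+# hypothetical and depend on the local DocBook/BoostBook installation):
+#
+#   using boostbook
+#     : /usr/share/xml/docbook/stylesheet/nwalsh
+#     : /usr/share/xml/docbook/schema/dtd/4.2
+#     ;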
+
+rule lock-config ( )
+{
+ if ! $(.initialized)
+ {
+ errors.user-error "BoostBook has not been configured." ;
+ }
+ if ! $(.config-locked)
+ {
+ .config-locked = true ;
+ }
+}
+
+rule modify-config ( )
+{
+ if $(.config-locked)
+ {
+ errors.user-error "BoostBook configuration cannot be changed after it has been used." ;
+ }
+}
+
+rule find-boost-in-registry ( keys * )
+{
+ local boost-root = ;
+ for local R in $(keys)
+ {
+ local installed-boost = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)"
+ : "InstallRoot" ] ;
+ if $(installed-boost)
+ {
+ boost-root += [ path.make $(installed-boost) ] ;
+ }
+ }
+ return $(boost-root) ;
+}
+
+rule check-docbook-xsl-dir ( )
+{
+ if $(.docbook-xsl-dir)
+ {
+ if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
+ {
+ errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
+ }
+ }
+ }
+}
+
+rule check-docbook-dtd-dir ( )
+{
+ if $(.docbook-dtd-dir)
+ {
+ if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
+ {
+ errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
+ }
+ }
+ }
+}
+
+rule check-boostbook-xsl-dir ( )
+{
+ if ! $(.boostbook-xsl-dir)
+ {
+ errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ;
+ }
+ else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
+ {
+ errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
+ }
+ }
+}
+
+rule check-boostbook-dtd-dir ( )
+{
+ if ! $(.boostbook-dtd-dir)
+ {
+ errors.user-error "error: BoostBook: could not find boostbook DTD." ;
+ }
+ else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
+ {
+ errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
+ }
+ }
+}
+
+rule check-boostbook-dir ( boostbook-dir ? )
+{
+ if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
+ {
+ errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ;
+ }
+}
+
+rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
+{
+ docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
+ docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
+ boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
+
+ # Look for the boostbook stylesheets relative to BOOST_ROOT
+ # and Boost.Build.
+ local boost-build-root = [ path.make [ build-system.location ] ] ;
+ local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ if $(boost-root)
+ {
+ boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ;
+ }
+ boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
+
+ # Try to find the tools in platform specific locations
+ if [ os.name ] = NT
+ {
+ # If installed by the Boost installer.
+ local boost-root = ;
+
+ local boost-installer-versions = snapshot cvs 1.33.0 ;
+ local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
+ local boostpro-installer-versions =
+ 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
+ 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
+
+ local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ;
+
+ # Make sure that the most recent version is searched for first
+ boost-root += [ sequence.reverse
+ [ find-boost-in-registry
+ Boost-Consulting.com\\$(boost-consulting-installer-versions)
+ boostpro.com\\$(boostpro-installer-versions) ] ] ;
+
+ # Plausible locations.
+ local root = [ PWD ] ;
+ while $(root) != $(root:D) { root = $(root:D) ; }
+ root = [ path.make $(root) ] ;
+ local search-dirs = ;
+ local docbook-search-dirs = ;
+ for local p in $(boost-root) {
+ search-dirs += [ path.join $(p) tools ] ;
+ }
+ for local p in $(old-installer-root)
+ {
+ search-dirs += [ path.join $(p) share ] ;
+ docbook-search-dirs += [ path.join $(p) share ] ;
+ }
+ search-dirs += [ path.join $(root) Boost tools ] ;
+ search-dirs += [ path.join $(root) Boost share ] ;
+ docbook-search-dirs += [ path.join $(root) Boost share ] ;
+
+ docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
+ docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
+ boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
+ }
+ else
+ {
+ # Plausible locations.
+
+ local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
+ local dtd-versions = 4.2 ;
+
+ docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
+ docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ;
+ docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
+
+ docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ;
+
+ boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
+
+ # Ubuntu Linux
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ;
+ docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ;
+ }
+
+ if $(docbook-xsl-dir)
+ {
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
+ }
+
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: Boost.Book: searching XSL/DTD in" ;
+ ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ;
+ }
+ local boostbook-xsl-dir ;
+ for local dir in $(boostbook-dir) {
+ boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
+ }
+ local boostbook-dtd-dir ;
+ for local dir in $(boostbook-dir) {
+ boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
+ }
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+
+ check-docbook-xsl-dir ;
+ check-docbook-dtd-dir ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+}
+
+rule xsl-dir
+{
+ lock-config ;
+ return $(.boostbook-xsl-dir) ;
+}
+
+rule dtd-dir
+{
+ lock-config ;
+ return $(.boostbook-dtd-dir) ;
+}
+
+rule docbook-xsl-dir
+{
+ lock-config ;
+ return $(.docbook-xsl-dir) ;
+}
+
+rule docbook-dtd-dir
+{
+ lock-config ;
+ return $(.docbook-dtd-dir) ;
+}
+
+rule dtdxml-to-boostbook ( target : source : properties * )
+{
+ lock-config ;
+ xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl"
+ : $(properties) ;
+}
+
+rule boostbook-to-docbook ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
+ xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+rule docbook-to-onehtml ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
+ xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+rule docbook-to-htmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ;
+}
+
+rule docbook-to-xhtmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ;
+}
+
+rule docbook-to-htmlhelp ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ;
+}
+
+rule docbook-to-manpages ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man ;
+}
+
+rule docbook-to-fo ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
+ xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+rule format-catalog-path ( path )
+{
+ local result = $(path) ;
+ if [ xsltproc.is-cygwin ]
+ {
+ if [ os.name ] = NT
+ {
+ drive = [ MATCH ^/(.):(.*)$ : $(path) ] ;
+ result = /cygdrive/$(drive[1])$(drive[2]) ;
+ }
+ }
+ else
+ {
+ if [ os.name ] = CYGWIN
+ {
+ local native-path = [ path.native $(path) ] ;
+ result = [ path.make $(native-path:W) ] ;
+ }
+ }
+ return [ regex.replace $(result) " " "%20" ] ;
+}
+
+rule generate-xml-catalog ( target : sources * : properties * )
+{
+ print.output $(target) ;
+
+ # BoostBook DTD catalog entry
+ local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
+ if $(boostbook-dtd-dir)
+ {
+ boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
+ }
+
+ print.text
+ "<?xml version=\"1.0\"?>"
+ "<!DOCTYPE catalog "
+ " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
+ " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
+ "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
+ " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
+ : true ;
+
+ local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
+ if ! $(docbook-xsl-dir)
+ {
+ ECHO "BoostBook warning: no DocBook XSL directory specified." ;
+ ECHO " If you have the DocBook XSL stylesheets installed, please " ;
+ ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
+ ECHO " are available here: http://docbook.sourceforge.net/ " ;
+ ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
+ }
+
+ local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
+ if ! $(docbook-dtd-dir)
+ {
+ ECHO "BoostBook warning: no DocBook DTD directory specified." ;
+ ECHO " If you have the DocBook DTD installed, please set " ;
+ ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
+ ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
+ ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
+ }
+
+ print.text "</catalog>" ;
+}
+
+rule xml-catalog ( )
+{
+ if ! $(.xml-catalog)
+ {
+ # The target is created as part of the root project. But ideally
+ # it would be created as part of the boostbook project. This is not
+ # currently possible as such global projects don't inherit things like
+ # the build directory.
+
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config &&
+ [ project.attribute $(root-project) parent-module ] != project-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+ .xml-catalog = [ new file-target boostbook_catalog
+ : XML
+ : [ project.target $(root-project) ]
+ : [ new action : boostbook.generate-xml-catalog ]
+ :
+ ] ;
+ .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
+ .xml-catalog-file = $(.xml-catalog-file:J=/) ;
+ }
+ return $(.xml-catalog) $(.xml-catalog-file) ;
+}
+
+class boostbook-generator : generator
+{
+ import feature ;
+ import virtual-target ;
+ import generators ;
+ import boostbook ;
+
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ # Generate the catalog, but only once...
+ local global-catalog = [ boostbook.xml-catalog ] ;
+ local catalog = $(global-catalog[1]) ;
+ local catalog-file = $(global-catalog[2]) ;
+ local targets ;
+
+ # Add the catalog to the property set
+ property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
+
+ local type = none ;
+ local manifest ;
+ local format = [ $(property-set).get <format> ] ;
+ switch $(format)
+ {
+ case html :
+ {
+ type = HTMLDIR ;
+ manifest = HTML.manifest ;
+ }
+ case xhtml :
+ {
+ type = XHTMLDIR ;
+ manifest = HTML.manifest ;
+ }
+ case htmlhelp :
+ {
+ type = HTMLHELP ;
+ manifest = HTML.manifest ;
+ }
+
+ case onehtml : type = HTML ;
+
+ case man :
+ {
+ type = MANPAGES ;
+ manifest = man.manifest ;
+ }
+
+ case docbook : type = DOCBOOK ;
+ case fo : type = FO ;
+ case pdf : type = PDF ;
+ case ps : type = PS ;
+ case tests : type = TESTS ;
+ }
+
+ if $(manifest)
+ {
+ # Create DOCBOOK file from BOOSTBOOK sources.
+ local base-target = [ generators.construct $(project)
+ : DOCBOOK : $(property-set) : $(sources) ] ;
+ base-target = $(base-target[2]) ;
+ $(base-target).depends $(catalog) ;
+
+ # Generate HTML/PDF/PS from DOCBOOK.
+ local target = [ generators.construct $(project) $(name)_$(manifest)
+ : $(type)
+ : [ $(property-set).add-raw
+ <xsl:param>manifest=$(name)_$(manifest) ]
+ : $(base-target) ] ;
+ local name = [ $(property-set).get <name> ] ;
+ name ?= $(format) ;
+ $(target[2]).set-path $(name) ;
+ $(target[2]).depends $(catalog) ;
+
+ targets += $(target[2]) ;
+ }
+ else {
+ local target = [ generators.construct $(project)
+ : $(type) : $(property-set) : $(sources) ] ;
+
+ if ! $(target)
+ {
+ errors.error "Cannot build documentation type '$(format)'" ;
+ }
+ else
+ {
+ $(target[2]).depends $(catalog) ;
+ targets += $(target[2]) ;
+ }
+ }
+
+ return $(targets) ;
+ }
+}
+
+generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ;
+
+# Creates a boostbook target.
+rule boostbook ( target-name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
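+
+# Usage sketch (hypothetical Jamfile; the target name and the doc/manual.xml
+# source path are made up for illustration):
+#
+#   boostbook manual : doc/manual.xml : <format>html ;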
+
+#############################################################################
+# Dependency scanners
+#############################################################################
+# XInclude scanner. Mostly stolen from c-scanner :)
+# Note that this assumes an "xi" prefix for XIncludes. This isn't always the
+# case for XML documents, but we'll assume it's true for anything we encounter.
+class xinclude-scanner : scanner
+{
+ import virtual-target ;
+ import path ;
+ import scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "xi:include[ ]*href=\"([^\"]*)\"" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
+
+scanner.register xinclude-scanner : xsl:path ;
+type.set-scanner XML : xinclude-scanner ;
+
+rule boostbook-to-tests ( target : source : properties * )
+{
+ lock-config ;
+ local boost_root = [ modules.peek : BOOST_ROOT ] ;
+ local native-path =
+ [ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ;
+ local stylesheet = $(native-path:S=.xsl) ;
+ xslt $(target) : $(source) $(stylesheet)
+ : $(properties) <xsl:param>boost.root=$(boost_root)
+ ;
+}
+
+
diff --git a/jam-files/boost-build/tools/borland.jam b/jam-files/boost-build/tools/borland.jam
new file mode 100644
index 000000000..6e43ca93a
--- /dev/null
+++ b/jam-files/boost-build/tools/borland.jam
@@ -0,0 +1,220 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2003 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Borland's command line compiler
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature : get-values ;
+import type ;
+import common ;
+
+feature.extend toolset : borland ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters borland :
+ version $(version) ] ;
+
+ local command = [ common.get-invocation-command borland : bcc32.exe
+ : $(command) ] ;
+
+ common.handle-options borland : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
+ flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
+ flags borland.link RUN_PATH $(condition) : $(root)/bin ;
+ flags borland .root $(condition) : $(root)/bin/ ;
+}
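+
+# Configuration sketch, e.g. in user-config.jam (the command path is
+# hypothetical and depends on the local Borland installation):
+#
+#   using borland : : "C:/Borland/BCC55/Bin/bcc32.exe" ;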
+
+
+# A borland-specific target type
+type.register BORLAND.TDS : tds ;
+
+# Declare generators
+
+generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
+generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
+
+generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
+generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
+generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
+generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
+
+# Declare flags
+
+flags borland.compile OPTIONS <debug-symbols>on : -v ;
+flags borland.link OPTIONS <debug-symbols>on : -v ;
+
+flags borland.compile OPTIONS <optimization>off : -Od ;
+flags borland.compile OPTIONS <optimization>speed : -O2 ;
+flags borland.compile OPTIONS <optimization>space : -O1 ;
+
+if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
+{
+ flags borland CFLAGS <inlining>off : -vi- ;
+ flags borland CFLAGS <inlining>on : -vi -w-inl ;
+ flags borland CFLAGS <inlining>full : -vi -w-inl ;
+}
+else
+{
+ flags borland CFLAGS : -vi- ;
+}
+
+flags borland.compile OPTIONS <warnings>off : -w- ;
+flags borland.compile OPTIONS <warnings>all : -w ;
+flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
+
+
+# Deal with various runtime configs...
+
+# This should not be applied to DLLs
+flags borland OPTIONS <user-interface>console : -tWC ;
+
+# -tWR sets -tW as well, so we turn it off here and then turn it
+# on again later if we need it:
+flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
+flags borland OPTIONS <user-interface>gui : -tW ;
+
+flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
+# Hmm.. not sure what's going on here.
+flags borland OPTIONS : -WM- ;
+flags borland OPTIONS <threading>multi : -tWM ;
+
+
+
+flags borland.compile OPTIONS <cxxflags> ;
+flags borland.compile DEFINES <define> ;
+flags borland.compile INCLUDES <include> ;
+
+flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
+
+#
+# for C++ compiles the following options are turned on by default:
+#
+# -j5 stops after 5 errors
+# -g255 allows an unlimited number of warnings
+# -q no banner
+# -c compile to object
+# -P treat the file as C++ code regardless of its extension
+# -a8 8 byte alignment; this option is on in the IDE by default
+# and affects binary compatibility.
+#
+
+# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+# For C, we don't pass -P flag
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+
+# Declare flags and action for linking
+toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
+toolset.flags borland.link LIBRARY_PATH <library-path> ;
+toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
+toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
+toolset.flags borland.link LIBRARIES <library-file> ;
+
+flags borland.link OPTIONS <linkflags> ;
+flags borland.link OPTIONS <link>shared : -tWD ;
+
+flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
+flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
+
+
+
+# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
+# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
+# $(BCC_TOOL_PATH) to the path
+# The NEED_IMPLIB variable controls whether we need to invoke implib.
+
+flags borland.archive AROPTIONS <archiveflags> ;
+
+# Declare the action for archives. We don't use a response file
+# since it's hard to get "+-" there.
+# The /P256 option increases the 'page' size -- with values that are
+# too low, tlib fails when building large applications.
+# CONSIDER: don't know what 'together' is for...
+actions updated together piecemeal archive
+{
+ $(.set-path)$(.root:W)$(.old-path)
+ tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
+}
+
+
+if [ os.name ] = CYGWIN
+{
+ .set-path = "cmd /S /C set \"PATH=" ;
+ .old-path = ";%PATH%\" \"&&\"" ;
+
+
+ # Couldn't get TLIB to stop being confused about pathnames
+ # containing dashes (it seemed to treat them as option separators
+ # when passed through from bash), so we explicitly write the
+ # command into a .bat file and execute that. TLIB is also finicky
+ # about pathname style! Forward slashes, too, are treated as
+ # options.
+ actions updated together piecemeal archive
+ {
+ chdir $(<:D)
+ echo +-$(>:BS) > $(<:BS).rsp
+ $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
+ }
+}
+else if [ os.name ] = NT
+{
+ .set-path = "set \"PATH=" ;
+ .old-path = ";%PATH%\"
+ " ;
+}
+else
+{
+ .set-path = "PATH=\"" ;
+ .old-path = "\":$PATH
+ export PATH
+ " ;
+}
+
+RM = [ common.rm-command ] ;
+
+nl = "
+" ;
+
+actions link
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+
+actions link.dll bind LIBRARIES RSP
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
+}
+
+# It seems impossible to specify output file with directory when compiling
+# asm files using bcc32, so use tasm32 directly.
+# /ml makes all symbol names case-sensitive
+actions asm
+{
+ $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
+}
+
diff --git a/jam-files/boost-build/tools/builtin.jam b/jam-files/boost-build/tools/builtin.jam
new file mode 100644
index 000000000..148e7308d
--- /dev/null
+++ b/jam-files/boost-build/tools/builtin.jam
@@ -0,0 +1,960 @@
+# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
+# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
+# Copyright 2006 Juergen Hunold
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines standard features and rules.
+
+import alias ;
+import "class" : new ;
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import os ;
+import path ;
+import print ;
+import project ;
+import property ;
+import regex ;
+import scanner ;
+import sequence ;
+import stage ;
+import symlink ;
+import toolset ;
+import type ;
+import targets ;
+import types/register ;
+import utility ;
+import virtual-target ;
+import message ;
+import convert ;
+
+# FIXME: the following generate module import is not needed here, but removing it
+# too hastily will break client code (e.g. the main Boost library Jamroot file)
+# that forgot to import the generate module before calling the generate rule.
+import generate ;
+
+
+.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd
+ openbsd osf qnx qnxnto sgi solaris unix unixware windows
+ elf # Not actually an OS -- used for targeting bare metal where the
+ # object format is ELF. This catches both -elf and -eabi gcc
+ # targets as well as other compilers targeting ELF. It is not
+ # clear how often we need to key off ELF specifically as opposed
+ # to other bare metal targets, but let's stick with gcc naming.
+ ;
+
+# Feature used to determine which OS we're on. New <target-os> and <host-os>
+# features should be used instead.
+local os = [ modules.peek : OS ] ;
+feature.feature os : $(os) : propagated link-incompatible ;
+
+
+# Translates from bjam current OS to the os tags used in host-os and target-os,
+# i.e. returns the running host-os.
+#
+local rule default-host-os ( )
+{
+ local host-os ;
+ if [ os.name ] in $(.os-names:U)
+ {
+ host-os = [ os.name ] ;
+ }
+ else
+ {
+ switch [ os.name ]
+ {
+ case NT : host-os = windows ;
+ case AS400 : host-os = unix ;
+ case MINGW : host-os = windows ;
+ case BSDI : host-os = bsd ;
+ case COHERENT : host-os = unix ;
+ case DRAGONFLYBSD : host-os = bsd ;
+ case IRIX : host-os = sgi ;
+ case MACOSX : host-os = darwin ;
+ case KFREEBSD : host-os = freebsd ;
+ case LINUX : host-os = linux ;
+ case SUNOS :
+ ECHO "SunOS is not a supported operating system." ;
+ ECHO "We believe last version of SunOS was released in 1992, " ;
+ ECHO "so if you get this message, something is very wrong with configuration logic. " ;
+ ECHO "Please report this as a bug. " ;
+ EXIT ;
+ case * : host-os = unix ;
+ }
+ }
+ return $(host-os:L) ;
+}
+
+
+# The two OS features define a known set of abstract OS names. The host-os is
+# the OS under which bjam is running. Even though this should really be a fixed
+# property we need to list all the values to prevent unknown value errors. Both
+# set the default value to the current OS to account for the default use case of
+# building on the target OS.
+feature.feature host-os : $(.os-names) ;
+feature.set-default host-os : [ default-host-os ] ;
+
+feature.feature target-os : $(.os-names) : propagated link-incompatible ;
+feature.set-default target-os : [ default-host-os ] ;
+
+
+feature.feature toolset : : implicit propagated symmetric ;
+feature.feature stdlib : native : propagated composite ;
+feature.feature link : shared static : propagated ;
+feature.feature runtime-link : shared static : propagated ;
+feature.feature runtime-debugging : on off : propagated ;
+feature.feature optimization : off speed space none : propagated ;
+feature.feature profiling : off on : propagated ;
+feature.feature inlining : off on full : propagated ;
+feature.feature threading : single multi : propagated ;
+feature.feature rtti : on off : propagated ;
+feature.feature exception-handling : on off : propagated ;
+
+# Whether there is support for asynchronous EH (e.g. catching SEGVs).
+feature.feature asynch-exceptions : off on : propagated ;
+
+# Whether all extern "C" functions are considered nothrow by default.
+feature.feature extern-c-nothrow : off on : propagated ;
+
+feature.feature debug-symbols : on off none : propagated ;
+# Controls whether the binary should be stripped -- that is, have
+# everything not necessary for running it removed. This option should
+# not be needed very often. Also, this feature will show up in the
+# target paths of everything, not just binaries. Should fix that
+# when implementing feature relevance.
+feature.feature strip : off on : propagated ;
+feature.feature define : : free ;
+feature.feature undef : : free ;
+feature.feature "include" : : free path ; #order-sensitive ;
+feature.feature cflags : : free ;
+feature.feature cxxflags : : free ;
+feature.feature fflags : : free ;
+feature.feature asmflags : : free ;
+feature.feature linkflags : : free ;
+feature.feature archiveflags : : free ;
+feature.feature version : : free ;
+
+# Generic, i.e. non-language specific, flags for tools.
+feature.feature flags : : free ;
+feature.feature location-prefix : : free ;
+
+
+# The following features are incidental since they have no effect on built
+# products. Not making them incidental will result in problems in corner cases,
+# e.g.:
+#
+# unit-test a : a.cpp : <use>b ;
+# lib b : a.cpp b ;
+#
+# Here, if <use> is not incidental, we would decide we have two targets for
+# a.obj with different properties and complain about it.
+#
+# Note that making a feature incidental does not mean it is ignored. It may be
+# ignored when creating a virtual target, but the rest of build process will use
+# them.
+feature.feature use : : free dependency incidental ;
+feature.feature dependency : : free dependency incidental ;
+feature.feature implicit-dependency : : free dependency incidental ;
+
+feature.feature warnings :
+ on # Enable default/"reasonable" warning level for the tool.
+ all # Enable all possible warnings issued by the tool.
+ off # Disable all warnings issued by the tool.
+ : incidental propagated ;
+
+feature.feature warnings-as-errors :
+ off # Do not fail the compilation if there are warnings.
+ on # Fail the compilation if there are warnings.
+ : incidental propagated ;
+
+# Feature that allows us to configure the maximal template instantiation depth
+# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers
+# actually support this configuration setting.
+#
+# Note that Boost Build currently does not allow defining features that take any
+# positive integral value as a parameter, which is what we need here, so we just
+# define some of the values here and leave it up to the user to extend this set
+# as needed using the feature.extend rule.
+#
+# TODO: This should be upgraded as soon as Boost Build adds support for custom
+# validated feature values or at least features allowing any positive integral
+# value. See related Boost Build related trac ticket #194.
+#
+feature.feature c++-template-depth
+ :
+ [ numbers.range 64 1024 : 64 ]
+ [ numbers.range 20 1000 : 10 ]
+ # Maximum template instantiation depth guaranteed for ANSI/ISO C++
+ # conforming programs.
+ 17
+ :
+ incidental optional propagated ;
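+
+# For instance (hypothetical), a project needing a deeper limit than the
+# predefined values could extend the set itself:
+#
+#   feature.extend c++-template-depth : 2048 ;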
+
+feature.feature source : : free dependency incidental ;
+feature.feature library : : free dependency incidental ;
+feature.feature file : : free dependency incidental ;
+feature.feature find-shared-library : : free ; #order-sensitive ;
+feature.feature find-static-library : : free ; #order-sensitive ;
+feature.feature library-path : : free path ; #order-sensitive ;
+
+# Internal feature.
+feature.feature library-file : : free dependency ;
+
+feature.feature name : : free ;
+feature.feature tag : : free ;
+feature.feature search : : free path ; #order-sensitive ;
+feature.feature location : : free path ;
+feature.feature dll-path : : free path ;
+feature.feature hardcode-dll-paths : true false : incidental ;
+
+
+# An internal feature that holds the paths of all dependency shared libraries.
+# On Windows, it is needed so that we can add all those paths to PATH when
+# running applications. On Linux, it is needed to add proper -rpath-link command
+# line options.
+feature.feature xdll-path : : free path ;
+
+# Provides means to specify def-file for windows DLLs.
+feature.feature def-file : : free dependency ;
+
+feature.feature suppress-import-lib : false true : incidental ;
+
+# Internal feature used to store the name of a bjam action to call when building
+# a target.
+feature.feature action : : free ;
+
+# This feature is used to allow specific generators to run. For example, QT
+# tools can only be invoked when QT library is used. In that case, <allow>qt
+# will be in usage requirement of the library.
+feature.feature allow : : free ;
+
+# The addressing model to generate code for. Currently a limited set only
+# specifying the bit size of pointers.
+feature.feature address-model : 16 32 64 32_64 : propagated optional ;
+
+# Type of CPU architecture to compile for.
+feature.feature architecture :
+ # x86 and x86-64
+ x86
+
+ # ia64
+ ia64
+
+ # Sparc
+ sparc
+
+ # RS/6000 & PowerPC
+ power
+
+ # MIPS/SGI
+ mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
+
+ # HP/PA-RISC
+ parisc
+
+ # Advanced RISC Machines
+ arm
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ combined
+ combined-x86-power
+
+ : propagated optional ;
+
+# The specific instruction set in an architecture to compile.
+feature.feature instruction-set :
+ # x86 and x86-64
+ native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
+ pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe
+ conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2
+
+ # ia64
+ itanium itanium1 merced itanium2 mckinley
+
+ # Sparc
+ v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
+ sparclet tsc701 v9 ultrasparc ultrasparc3
+
+ # RS/6000 & PowerPC
+ 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
+ 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
+ power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
+
+ # MIPS
+ 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
+ r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
+ vr5000 vr5400 vr5500
+
+ # HP/PA-RISC
+ 700 7100 7100lc 7200 7300 8000
+
+ # Advanced RISC Machines
+ armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
+
+ : propagated optional ;
+
+# Used to select a specific variant of C++ ABI if the compiler supports several.
+feature.feature c++abi : : propagated optional ;
+
+feature.feature conditional : : incidental free ;
+
+# The value of 'no' prevents building of a target.
+feature.feature build : yes no : optional ;
+
+# Windows-specific features
+
+feature.feature user-interface : console gui wince native auto ;
+
+feature.feature variant : : implicit composite propagated symmetric ;
+
+
+# Declares a new variant.
+#
+# First determines explicit properties for this variant, by refining parents'
+# explicit properties with the passed explicit properties. The result is
+# remembered and will be used if this variant is used as parent.
+#
+# Second, determines the full property set for this variant by adding to the
+# explicit properties default values for all missing non-symmetric properties.
+#
+# Lastly, makes appropriate value of 'variant' property expand to the full
+# property set.
+#
+rule variant ( name # Name of the variant
+ : parents-or-properties * # Specifies parent variants, if
+ # 'explicit-properties' are given, and
+ # explicit-properties or parents otherwise.
+ : explicit-properties * # Explicit properties.
+ )
+{
+ local parents ;
+ if ! $(explicit-properties)
+ {
+ if $(parents-or-properties[1]:G)
+ {
+ explicit-properties = $(parents-or-properties) ;
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+
+ # The problem is that we have to check for conflicts between base variants.
+ if $(parents[2])
+ {
+ errors.error "multiple base variants are not yet supported" ;
+ }
+
+ local inherited ;
+ # Add explicitly specified properties for parents.
+ for local p in $(parents)
+ {
+ # TODO: This check may be made stricter.
+ if ! [ feature.is-implicit-value $(p) ]
+ {
+ errors.error "Invalid base variant" $(p) ;
+ }
+
+ inherited += $(.explicit-properties.$(p)) ;
+ }
+ property.validate $(explicit-properties) ;
+ explicit-properties = [ property.refine $(inherited)
+ : $(explicit-properties) ] ;
+
+ # Record explicitly specified properties for this variant. We do this after
+ # inheriting parents' properties so they affect other variants derived from
+ # this one.
+ .explicit-properties.$(name) = $(explicit-properties) ;
+
+ feature.extend variant : $(name) ;
+ feature.compose <variant>$(name) : $(explicit-properties) ;
+}
+IMPORT $(__name__) : variant : : variant ;
+
+
+variant debug : <optimization>off <debug-symbols>on <inlining>off
+ <runtime-debugging>on ;
+variant release : <optimization>speed <debug-symbols>off <inlining>full
+ <runtime-debugging>off <define>NDEBUG ;
+variant profile : release : <profiling>on <debug-symbols>on ;
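+
+# A user-defined variant can be declared in the same way; for example
+# (hypothetical):
+#
+#   variant release-symbols : release : <debug-symbols>on ;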
+
+
+class searched-lib-target : abstract-file-target
+{
+ rule __init__ ( name
+ : project
+ : shared ?
+ : search *
+ : action
+ )
+ {
+ abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
+ : $(action) : ;
+
+ self.shared = $(shared) ;
+ self.search = $(search) ;
+ }
+
+ rule shared ( )
+ {
+ return $(self.shared) ;
+ }
+
+ rule search ( )
+ {
+ return $(self.search) ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ }
+
+ rule path ( )
+ {
+ }
+}
+
+
+# The generator class for libraries (target type LIB). Depending on properties
+# it will request building of the appropriate specific library type --
+# SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
+#
+class lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ # The lib generator is composing, and can only be invoked with an
+ # explicit name. This check is present in generator.run (and so in
+ # builtin.linking-generator) but duplicated here to avoid doing extra
+ # work.
+ if $(name)
+ {
+ local properties = [ $(property-set).raw ] ;
+ # Determine the needed target type.
+ local actual-type ;
+ # <source> files can be generated by a <conditional>@rule feature,
+ # in which case we do not consider it a SEARCHED_LIB type.
+ if ! <source> in $(properties:G) &&
+ ( <search> in $(properties:G) || <name> in $(properties:G) )
+ {
+ actual-type = SEARCHED_LIB ;
+ }
+ else if <file> in $(properties:G)
+ {
+ actual-type = LIB ;
+ }
+ else if <link>shared in $(properties)
+ {
+ actual-type = SHARED_LIB ;
+ }
+ else
+ {
+ actual-type = STATIC_LIB ;
+ }
+ property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
+ # Construct the target.
+ return [ generators.construct $(project) $(name) : $(actual-type)
+ : $(property-set) : $(sources) ] ;
+ }
+ }
+
+ rule viable-source-types ( )
+ {
+ return * ;
+ }
+}
+
+
+generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
+
+
+# The implementation of the 'lib' rule. Beyond the standard syntax, that rule
+# also allows the simplified form "lib a b c ;".
+#
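+# Usage sketches (hypothetical Jamfile fragments; the library names and the
+# file path are made up for illustration):
+#
+#   lib z ;                                # searched library (-lz)
+#   lib png : z : <name>png ;              # searched library that also links z
+#   lib gui db aux ;                       # several searched libraries at once
+#   lib bar : : <file>/usr/lib/libbar.a ;  # prebuilt library file
+#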
+rule lib ( names + : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if $(names[2])
+ {
+ if <name> in $(requirements:G)
+ {
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify the <name> feature." ;
+ }
+ if $(sources)
+ {
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify sources." ;
+ }
+ }
+
+ # This is a circular module dependency so it must be imported here.
+ import targets ;
+
+ local project = [ project.current ] ;
+ local result ;
+
+ for local name in $(names)
+ {
+ local r = $(requirements) ;
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if ! $(sources) && ! <name> in $(requirements:G)
+ && ! <file> in $(requirements:G)
+ {
+ r += <name>$(name) ;
+ }
+ result += [ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : LIB
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(r) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+ }
+ return $(result) ;
+}
+IMPORT $(__name__) : lib : : lib ;
+
+
+class searched-lib-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( )
+ {
+ # The requirements cause the generators to be tried *only* when we're
+ # building a lib target with a 'search' feature. This seems ugly --- all
+ # we want is to make sure searched-lib-generator is not invoked deep
+ # inside transformation search to produce intermediate targets.
+ generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if $(name)
+ {
+ # If 'name' is empty, it means we have not been called to build a
+ # top-level target. In this case, we just fail immediately, because
+ # searched-lib-generator cannot be used to produce intermediate
+ # targets.
+
+ local properties = [ $(property-set).raw ] ;
+ local shared ;
+ if <link>shared in $(properties)
+ {
+ shared = true ;
+ }
+
+ local search = [ feature.get-values <search> : $(properties) ] ;
+
+ local a = [ new null-action $(property-set) ] ;
+ local lib-name = [ feature.get-values <name> : $(properties) ] ;
+ lib-name ?= $(name) ;
+ local t = [ new searched-lib-target $(lib-name) : $(project)
+ : $(shared) : $(search) : $(a) ] ;
+ # We return sources for a simple reason. If there is
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to 'png'
+ # will link to 'z', too.
+ return [ property-set.create <xdll-path>$(search) ]
+ [ virtual-target.register $(t) ] $(sources) ;
+ }
+ }
+}
+
+generators.register [ new searched-lib-generator ] ;
+
+
+class prebuilt-lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
+
+generators.override builtin.prebuilt : builtin.lib-generator ;
+
+class preprocessed-target-class : basic-target
+{
+ import generators ;
+ rule construct ( name : sources * : property-set )
+ {
+ local result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
+ if ! $(result)
+ {
+ result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
+ }
+ if ! $(result)
+ {
+ local s ;
+ for x in $(sources)
+ {
+ s += [ $(x).name ] ;
+ }
+ local p = [ project ] ;
+ errors.user-error
+ "In project" [ $(p).name ] :
+ "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
+ }
+ return $(result) ;
+ }
+}
+
+rule preprocessed ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+ return [ targets.main-target-alternative
+ [ new preprocessed-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
+
+IMPORT $(__name__) : preprocessed : : preprocessed ;
+
+class compile-action : action
+{
+ import sequence ;
+
+ rule __init__ ( targets * : sources * : action-name : properties * )
+ {
+ action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
+ }
+
+ # For all virtual targets in the same dependency graph as self, i.e. those
+ # that belong to the same main target, add their directories to the include path.
+ #
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes "include" : H ] ] ;
+ }
+}
+
+
+# Declare a special compiler generator. The only thing it does is change the
+# type used to represent 'action' in the constructed dependency graph to
+# 'compile-action'. That class in turn adds additional include paths to handle
+# cases where a source file includes headers that are themselves generated.
+#
+class C-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) :
+ $(requirements) : $(optional-properties) ;
+ }
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+}
+
+
+rule register-c-compiler ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+{
+ generators.register [ new C-compiling-generator $(id) : $(source-types) :
+ $(target-types) : $(requirements) : $(optional-properties) ] ;
+}
+
+# FIXME: this is ugly, should find a better way (we would like client code to
+# register all generators as "generators.some-rule" instead of
+# "some-module.some-rule".)
+#
+IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
+
+
+# The generator class for handling EXE and SHARED_LIB creation.
+#
+class linking-generator : generator
+{
+ import path ;
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule __init__ ( id
+ composing ? : # The generator will be composing if a non-empty
+ # string is passed or the parameter is not given. To
+ # make the generator non-composing, pass an empty
+ # string ("").
+ source-types + :
+ target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ # Add <library-path> properties for all searched libraries.
+ local extra ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = SEARCHED_LIB
+ {
+ local search = [ $(s).search ] ;
+ extra += <library-path>$(search) ;
+ }
+ }
+
+ # It is possible that sources include shared libraries that did not come
+ # from 'lib' targets, e.g. .so files specified as sources. In this case
+ # we have to add extra dll-path properties and propagate extra xdll-path
+ # properties so that applications linking to us will get an xdll-path to
+ # those libraries.
+ local extra-xdll-paths ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
+ {
+ # Unfortunately, we do not have a good way to find the path to a
+ # file, so use this nasty approach.
+ #
+ # TODO: This needs to be done better. One thing that is really
+ # broken with this is that it does not work correctly with
+ # projects having multiple source locations.
+ local p = [ $(s).project ] ;
+ local location = [ path.root [ $(s).name ]
+ [ $(p).get source-location ] ] ;
+ extra-xdll-paths += [ path.parent $(location) ] ;
+ }
+ }
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: do not need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, python extensions) can not
+ # hardcode paths to dependent libraries.
+ if [ $(property-set).get <hardcode-dll-paths> ] = true
+ && [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ local xdll-path = [ $(property-set).get <xdll-path> ] ;
+ extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
+ }
+
+ if $(extra)
+ {
+ property-set = [ $(property-set).add-raw $(extra) ] ;
+ }
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ local ur ;
+ if $(result)
+ {
+ ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
+ ur = [ $(ur).add
+ [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
+ }
+ return $(ur) $(result) ;
+ }
+
+ rule extra-usage-requirements ( created-targets * : property-set )
+ {
+ local result = [ property-set.empty ] ;
+ local extra ;
+
+ # Add appropriate <xdll-path> usage requirements.
+ local raw = [ $(property-set).raw ] ;
+ if <link>shared in $(raw)
+ {
+ local paths ;
+ local pwd = [ path.pwd ] ;
+ for local t in $(created-targets)
+ {
+ if [ type.is-derived [ $(t).type ] SHARED_LIB ]
+ {
+ paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
+ }
+ }
+ extra += $(paths:G=<xdll-path>) ;
+ }
+
+ # We need to pass <xdll-path> features that we've got from sources,
+ # because if a shared library is built, an exe using it needs to know the
+ # paths to the other shared libraries this one depends on in order to be
+ # able to find them all at runtime.
+
+ # Just pass all features in property-set; it is theoretically possible
+ # that we will propagate <xdll-path> features explicitly specified by
+ # the user, but then the user is to blame for using an internal feature.
+ local values = [ $(property-set).get <xdll-path> ] ;
+ extra += $(values:G=<xdll-path>) ;
+
+ if $(extra)
+ {
+ result = [ property-set.create $(extra) ] ;
+ }
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ; # Sources to pass to inherited rule.
+ local properties2 ; # Properties to pass to inherited rule.
+ local libraries ; # Library sources.
+
+ # Searched libraries are not passed as arguments to the linker but via
+ # some option. So, we pass them to the action using a property.
+ properties2 = [ $(property-set).raw ] ;
+ local fsa ;
+ local fst ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
+ {
+ local name = [ $(s).name ] ;
+ if [ $(s).shared ]
+ {
+ fsa += $(name) ;
+ }
+ else
+ {
+ fst += $(name) ;
+ }
+ }
+ else
+ {
+ sources2 += $(s) ;
+ }
+ }
+ properties2 += <find-shared-library>$(fsa:J=&&)
+ <find-static-library>$(fst:J=&&) ;
+
+ return [ generator.generated-targets $(sources2)
+ : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
+ }
+}
+
+
+rule register-linker ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new linking-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
+# The generator class for handling STATIC_LIB creation.
+#
+class archive-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( id composing ? : source-types + : target-types +
+ : requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ # For static linking, if we get a library among the sources, we can not
+ # directly link to it, so we need to cause our dependants to link to that
+ # library. There are two approaches:
+ # - adding the library to the list of returned targets.
+ # - using the <library> usage requirements.
+ # The problem with the first is:
+ #
+ # lib a1 : : <file>liba1.a ;
+ # lib a2 : a2.cpp a1 : <link>static ;
+ # install dist : a2 ;
+ #
+ # here we will try to install 'a1', even though it is not necessary in
+ # the general case. With the second approach, even indirect dependants
+ # will link to the library, but it should not cause any harm. So, return
+ # all LIB sources together with created targets, so that dependants link
+ # to them.
+ local usage-requirements ;
+ if [ $(property-set).get <link> ] = static
+ {
+ for local t in $(sources)
+ {
+ if [ type.is-derived [ $(t).type ] LIB ]
+ {
+ usage-requirements += <library>$(t) ;
+ }
+ }
+ }
+
+ usage-requirements = [ property-set.create $(usage-requirements) ] ;
+
+ return $(usage-requirements) $(result) ;
+ }
+}
+
+
+rule register-archiver ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new archive-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
+# Generator that accepts everything and produces nothing. Useful as a general
+# fallback for toolset-specific actions like PCH generation.
+#
+class dummy-generator : generator
+{
+ import property-set ;
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ return [ property-set.empty ] ;
+ }
+}
+
+IMPORT $(__name__) : register-linker register-archiver
+ : : generators.register-linker generators.register-archiver ;
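+
+# A hedged usage sketch (the toolset name "mycc" is hypothetical): toolset
+# modules register their link and archive actions through these rules, e.g.
+#
+#   generators.register-linker mycc.link : OBJ SEARCHED_LIB STATIC_LIB : EXE
+#       : <toolset>mycc ;
+#   generators.register-archiver mycc.archive : OBJ : STATIC_LIB : <toolset>mycc ;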
diff --git a/jam-files/boost-build/tools/cast.jam b/jam-files/boost-build/tools/cast.jam
new file mode 100644
index 000000000..6c84922f1
--- /dev/null
+++ b/jam-files/boost-build/tools/cast.jam
@@ -0,0 +1,91 @@
+# Copyright 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target 'cast', used to change the type of a target. For
+# example, in the Qt library one wants two kinds of CPP files -- those that are
+# simply compiled and those that are passed through the MOC tool.
+#
+# This is done with:
+#
+# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
+#
+# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
+# the cast rule will change the target type of widget.cpp to MOCCABLE-CPP, and Qt
+# support will run the MOC tool as part of the build process.
+#
+# At the moment, the 'cast' rule only works for non-derived (source) targets.
+#
+# TODO: The following comment is unclear or incorrect. Clean it up.
+# > Another solution would be to add a separate main target 'moc-them' that
+# > would moc all the passed sources, no matter what their type is, but I prefer
+# > cast, as defining a new target type + generator for that type is somewhat
+# > simpler than defining a main target rule.
+
+import "class" : new ;
+import errors ;
+import project ;
+import property-set ;
+import targets ;
+import type ;
+
+
+class cast-target-class : typed-target
+{
+ import type ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local result ;
+ for local s in $(source-targets)
+ {
+ if ! [ class.is-a $(s) : file-target ]
+ {
+ import errors ;
+ errors.user-error Source to the 'cast' rule is not a file! ;
+ }
+ if [ $(s).action ]
+ {
+ import errors ;
+ errors.user-error Only non-derived targets are allowed for
+ 'cast'. : when building [ full-name ] ;
+ }
+ local r = [ $(s).clone-with-different-type $(self.type) ] ;
+ result += [ virtual-target.register $(r) ] ;
+ }
+ return [ property-set.empty ] $(result) ;
+ }
+}
+
+
+rule cast ( name type : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ local real-type = [ type.type-from-rule-name $(type) ] ;
+ if ! $(real-type)
+ {
+ errors.user-error No type corresponds to the main target rule name
+ '$(type)' : "Hint: try a lowercase name" ;
+ }
+
+ targets.main-target-alternative [ new cast-target-class $(name) : $(project)
+ : $(real-type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ;
+}
+
+
+IMPORT $(__name__) : cast : : cast ;
diff --git a/jam-files/boost-build/tools/clang-darwin.jam b/jam-files/boost-build/tools/clang-darwin.jam
new file mode 100644
index 000000000..a8abc7d6a
--- /dev/null
+++ b/jam-files/boost-build/tools/clang-darwin.jam
@@ -0,0 +1,170 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import clang ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset clang : platform : darwin ;
+
+toolset.inherit-generators clang-darwin
+ <toolset>clang <toolset-clang:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override clang-darwin.prebuilt : builtin.lib-generator ;
+generators.override clang-darwin.prebuilt : builtin.prebuilt ;
+generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules clang-darwin : gcc ;
+toolset.inherit-flags clang-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# vectorization diagnostics
+feature vectorize : off on full ;
+
+# Initializes the clang-darwin toolset.
+# The version parameter is optional.
+# The command (default clang++) is used to invoke the specified clang compiler.
+# The compile and link options allow you to specify additional command line
+# options for each version.
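+#
+# A hedged example for a user-config.jam (the version is hypothetical; extra
+# <cflags>/<cxxflags>/<linkflags> properties may be appended as options):
+#
+#   using clang-darwin : 3.0 : clang++ ;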
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command clang-darwin : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-darwin
+ : version $(version) ] ;
+
+ common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags clang-darwin darwin $(condition) ;
+
+}
+
+SPACE = " " ;
+
+flags clang-darwin.compile OPTIONS <cflags> ;
+flags clang-darwin.compile OPTIONS <cxxflags> ;
+# flags clang-darwin.compile INCLUDES <include> ;
+
+# Declare flags and action for compilation.
+toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
+toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags clang-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of clang-linux
+# that inherits from gcc, but does not have the same
+# logic to set the .AR variable. We can put the same
+# logic in clang-linux, but that's hardly worth the trouble
+# as on Linux, 'ar' is always available.
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove the archive and start again. Here's the rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "ranlib" -cs "$(<)"
+}
+
+flags clang-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/jam-files/boost-build/tools/clang-linux.jam b/jam-files/boost-build/tools/clang-linux.jam
new file mode 100644
index 000000000..036d749e6
--- /dev/null
+++ b/jam-files/boost-build/tools/clang-linux.jam
@@ -0,0 +1,196 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import clang ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset clang : platform : linux ;
+
+toolset.inherit-generators clang-linux
+ <toolset>clang <toolset-clang:platform>linux : gcc
+ : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
+generators.override clang-linux.prebuilt : builtin.lib-generator ;
+generators.override clang-linux.prebuilt : builtin.prebuilt ;
+generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH
+ : <toolset>clang <toolset-clang:platform>linux : pth ;
+
+toolset.inherit-rules clang-linux : gcc ;
+toolset.inherit-flags clang-linux : gcc
+ : <inlining>off <inlining>on <inlining>full
+ <optimization>space <optimization>speed
+ <warnings>off <warnings>all <warnings>on ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
+ .debug-configuration = true ;
+}
+
+rule init ( version ? : command * : options * ) {
+ command = [ common.get-invocation-command clang-linux : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+
+ if $(command) {
+ version ?= [ MATCH "version ([0-9.]+)"
+ : [ SHELL "$(command-string) --version" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-linux
+ : version $(version) ] ;
+
+ common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags clang-linux gnu $(condition) ;
+}
+
+###############################################################################
+# Flags
+
+toolset.flags clang-linux.compile OPTIONS <cflags> ;
+toolset.flags clang-linux.compile OPTIONS <cxxflags> ;
+
+toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
+toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
+
+# note: clang silently ignores some of these inlining options
+toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
+toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
+
+###############################################################################
+# C and C++ compilation
+
+rule compile.c++ ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ local pth-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pth-file) {
+ DEPENDS $(<) : $(pth-file) ;
+ compile.c++.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ compile.c++.without-pth $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c++.without-pth {
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+actions compile.c++.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ local pth-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pth-file) {
+ DEPENDS $(<) : $(pth-file) ;
+ compile.c.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ compile.c.without-pth $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c.without-pth
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# PCH emission
+
+rule compile.c++.pch ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++.pch {
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# Linking
+
+SPACE = " " ;
+
+rule link ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+# Differs from 'link' above only in the added -shared flag.
+actions link.dll bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
diff --git a/jam-files/boost-build/tools/clang.jam b/jam-files/boost-build/tools/clang.jam
new file mode 100644
index 000000000..e0ac9a553
--- /dev/null
+++ b/jam-files/boost-build/tools/clang.jam
@@ -0,0 +1,27 @@
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'clang' toolset. Depending on the current system, it
+# forwards either to the 'clang-linux' or the 'clang-darwin' module.
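+#
+# A hedged example for a user-config.jam (the explicit command is optional; the
+# path shown is hypothetical):
+#
+#   using clang ;
+#   # or, pinning a specific compiler binary:
+#   # using clang : : /usr/bin/clang++ ;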
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : clang ;
+feature.subfeature toolset clang : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = MACOSX
+ {
+ toolset.using clang-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using clang-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/common.jam b/jam-files/boost-build/tools/common.jam
new file mode 100644
index 000000000..df914d9d4
--- /dev/null
+++ b/jam-files/boost-build/tools/common.jam
@@ -0,0 +1,986 @@
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides actions common to all toolsets, such as creating directories and
+# removing files.
+
+import os ;
+import modules ;
+import utility ;
+import print ;
+import type ;
+import feature ;
+import errors ;
+import path ;
+import sequence ;
+import toolset ;
+import virtual-target ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
+{
+ .show-configuration = true ;
+}
+
+# Configurations
+#
+# The following class helps to manage toolset configurations. Each configuration
+# has a unique ID and one or more parameters. A typical example of a unique ID
+# is a condition generated by the 'common.check-init-parameters' rule. Other kinds
+# of IDs can be used. Parameters may include any details about the configuration
+# like 'command', 'path', etc.
+#
+# A toolset configuration may be in one of the following states:
+#
+# - registered
+# Configuration has been registered (e.g. explicitly or by auto-detection
+# code) but has not yet been marked as used, i.e. the 'toolset.using' rule has
+# not yet been called for it.
+# - used
+# Calling the 'toolset.using' rule marks the configuration as 'used'.
+#
+# The main difference between the states above is that while a configuration is
+# 'registered' its options can be freely changed. This is useful in particular
+# for autodetection code - all detected configurations may be safely overwritten
+# by user code.
+
+class configurations
+{
+ import errors ;
+
+ rule __init__ ( )
+ {
+ }
+
+ # Registers a configuration.
+ #
+ # Returns 'true' if the configuration has been added and an empty value if
+ # it already exists. Reports an error if the configuration is 'used'.
+ #
+ rule register ( id )
+ {
+ if $(id) in $(self.used)
+ {
+ errors.error "common: the configuration '$(id)' is in use" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.all)
+ {
+ self.all += $(id) ;
+
+ # Indicate that a new configuration has been added.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Mark a configuration as 'used'.
+ #
+ # Returns 'true' if the state of the configuration has been changed to
+ # 'used' and an empty value if the state has not been changed. Reports an
+ # error if the configuration is not known.
+ #
+ rule use ( id )
+ {
+ if ! $(id) in $(self.all)
+ {
+ errors.error "common: the configuration '$(id)' is not known" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.used)
+ {
+ self.used += $(id) ;
+
+ # Indicate that the configuration has been marked as 'used'.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Return all registered configurations.
+ #
+ rule all ( )
+ {
+ return $(self.all) ;
+ }
+
+ # Return all used configurations.
+ #
+ rule used ( )
+ {
+ return $(self.used) ;
+ }
+
+ # Returns the value of a configuration parameter.
+ #
+ rule get ( id : param )
+ {
+ return $(self.$(param).$(id)) ;
+ }
+
+ # Sets the value of a configuration parameter.
+ #
+ rule set ( id : param : value * )
+ {
+ self.$(param).$(id) = $(value) ;
+ }
+}
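+
+# A hedged usage sketch of the 'configurations' class above (the id and the
+# parameter name are hypothetical; real ids are typically conditions produced
+# by 'check-init-parameters', and 'new' is assumed imported from the "class"
+# module):
+#
+#   .configs = [ new configurations ] ;
+#   $(.configs).register <toolset>mycc-1.0 ;
+#   $(.configs).set <toolset>mycc-1.0 : command : /usr/bin/mycc ;
+#   $(.configs).use <toolset>mycc-1.0 ;
+#   local command = [ $(.configs).get <toolset>mycc-1.0 : command ] ;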
+
+
+# The rule for checking toolset parameters. Trailing parameters should all be
+# parameter name/value pairs. The rule will check that each parameter either has
+# a value in each invocation or has no value in each invocation. Also, the rule
+# will check that the combination of all parameter values is unique in all
+# invocations.
+#
+# Each parameter name corresponds to a subfeature. This rule will declare a
+# subfeature the first time a non-empty parameter value is passed and will
+# extend it with all the values.
+#
+# The return value from this rule is a condition to be used for flags settings.
+#
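+# A hedged example, as typically called from a toolset's init rule (the toolset
+# name is hypothetical); the returned condition is then used to scope
+# 'toolset.flags' settings:
+#
+#   local condition = [ common.check-init-parameters mycc : version $(version) ] ;
+#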
+rule check-init-parameters ( toolset requirement * : * )
+{
+ local sig = $(toolset) ;
+ local condition = <toolset>$(toolset) ;
+ local subcondition ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local name = $($(index)[1]) ;
+ local value = $($(index)[2]) ;
+
+ if $(value)-is-not-empty
+ {
+ condition = $(condition)-$(value) ;
+ if $(.had-unspecified-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "no value was specified in earlier"
+ "initialization" : "an explicit value is specified now" ;
+ }
+ # The logic below is for the Intel compiler. It calls this rule with
+ # 'intel-linux' and 'intel-win' as the toolset, so we need to get the
+ # base part of the toolset name. We can not pass 'intel' as the toolset
+ # because in that case it would be impossible to register versionless
+ # intel-linux and intel-win toolsets of a specific version.
+ local t = $(toolset) ;
+ local m = [ MATCH ([^-]*)- : $(toolset) ] ;
+ if $(m)
+ {
+ t = $(m[1]) ;
+ }
+ if ! $(.had-value.$(toolset).$(name))
+ {
+ if ! $(.declared-subfeature.$(t).$(name))
+ {
+ feature.subfeature toolset $(t) : $(name) : : propagated ;
+ .declared-subfeature.$(t).$(name) = true ;
+ }
+ .had-value.$(toolset).$(name) = true ;
+ }
+ feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
+ subcondition += <toolset-$(t):$(name)>$(value) ;
+ }
+ else
+ {
+ if $(.had-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "an explicit value was specified in an"
+ "earlier initialization" : "no value is specified now" ;
+ }
+ .had-unspecified-value.$(toolset).$(name) = true ;
+ }
+ sig = $(sig)$(value:E="")- ;
+ }
+ if $(sig) in $(.all-signatures)
+ {
+ local message =
+ "duplicate initialization of $(toolset) with the following parameters: " ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local p = $($(index)) ;
+ if $(p)
+ {
+ message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
+ }
+ }
+ message += "previous initialization at $(.init-loc.$(sig))" ;
+ errors.user-error
+ $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
+ $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
+ }
+ .all-signatures += $(sig) ;
+ .init-loc.$(sig) = [ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if $(requirement)
+ {
+ local r = <toolset>$(toolset) $(requirement) ;
+ r = $(r:J=,) ;
+ toolset.add-requirements $(r):$(subcondition) ;
+ }
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition += $(requirement) ;
+
+ if $(.show-configuration)
+ {
+ ECHO notice: $(condition) ;
+ }
+ return $(condition:J=/) ;
+}
+
+
+# A helper rule to get the command to invoke some tool. If
+# 'user-provided-command' is not given, tries to find a binary named 'tool' in
+# PATH and in the passed 'additional-paths'. Otherwise, verifies that the first
+# element of 'user-provided-command' is an existing program.
+#
+# This rule returns the command to be used when invoking the tool. If we can not
+# find the tool, a warning is issued. If 'path-last' is specified, PATH is
+# checked after 'additional-paths' when searching for 'tool'.
+#
+rule get-invocation-command-nodefault ( toolset : tool :
+ user-provided-command * : additional-paths * : path-last ? )
+{
+ local command ;
+ if ! $(user-provided-command)
+ {
+ command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ;
+ ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ else
+ {
+ command = [ check-tool $(user-provided-command) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO "warning: toolset $(toolset) initialization: " ;
+ ECHO "warning: can not find user-provided command " '$(user-provided-command)' ;
+ ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+
+ return $(command) ;
+}
+
+
+# Same as get-invocation-command-nodefault, except that if no tool is found,
+# returns either the user-provided-command, if present, or the 'tool' parameter.
+#
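+# A hedged example, as used from a toolset's init rule (the toolset and tool
+# names are hypothetical):
+#
+#   command = [ common.get-invocation-command mycc : mycc++ : $(command) ] ;
+#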
+rule get-invocation-command ( toolset : tool : user-provided-command * :
+ additional-paths * : path-last ? )
+{
+ local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
+ $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
+
+ if ! $(result)
+ {
+ if $(user-provided-command)
+ {
+ result = $(user-provided-command) ;
+ }
+ else
+ {
+ result = $(tool) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Given an invocation command, return the absolute path to the directory
+# containing the command. This works even if the command has no directory
+# component and was found on the PATH.
+#
+rule get-absolute-tool-path ( command )
+{
+ if $(command:D)
+ {
+ return $(command:D) ;
+ }
+ else
+ {
+ local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
+ return $(m[1]:D) ;
+ }
+}
+
+
+# Attempts to find a tool (binary) named 'name' in PATH and in 'additional-paths'.
+# If found in PATH, returns 'name'; if found in the additional paths, returns the
+# absolute name. If the tool is found in several directories, returns the
+# first path found. Otherwise, returns an empty string. If 'path-last' is
+# specified, PATH is searched after 'additional-paths'.
+#
+local rule find-tool ( name : additional-paths * : path-last ? )
+{
+ local path = [ path.programs-path ] ;
+ local match = [ path.glob $(path) : $(name) $(name).exe ] ;
+ local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
+
+ local result ;
+ if $(path-last)
+ {
+ result = $(additional-match) ;
+ if ! $(result) && $(match)
+ {
+ result = $(name) ;
+ }
+ }
+ else
+ {
+ if $(match)
+ {
+ result = $(name) ;
+ }
+ else
+ {
+ result = $(additional-match) ;
+ }
+ }
+ if $(result)
+ {
+ return [ path.native $(result[1]) ] ;
+ }
+}
+
+
+# Checks if 'command' can be found either in path or is a full name to an
+# existing file.
+#
+local rule check-tool-aux ( command )
+{
+ if $(command:D)
+ {
+ if [ path.exists $(command) ]
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
+ # Only NT will run .bat & .cmd files by their unqualified names.
+ || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
+ [ path.exists $(command).cmd ] ) )
+ {
+ return $(command) ;
+ }
+ }
+ else
+ {
+ if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
+ {
+ return $(command) ;
+ }
+ }
+}
+
+
+# Checks that a tool can be invoked by 'command'. If the command is not an
+# absolute path, checks whether it can be found in PATH. If the command is an
+# absolute path, checks that it exists. Returns 'command' if OK or an empty
+# string otherwise.
+#
+local rule check-tool ( xcommand + )
+{
+ if [ check-tool-aux $(xcommand[1]) ] ||
+ [ check-tool-aux $(xcommand[-1]) ]
+ {
+ return $(xcommand) ;
+ }
+}
+
+
+# Handle common options for toolset, specifically sets the following flag
+# variables:
+# - CONFIG_COMMAND to $(command)
+# - OPTIONS for compile to the value of <compileflags> in $(options)
+# - OPTIONS for compile.c to the value of <cflags> in $(options)
+# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
+# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
+# - OPTIONS for link to the value of <linkflags> in $(options)
+#
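+# A hedged example, as typically called from a toolset's init rule (the toolset
+# name is hypothetical):
+#
+#   common.handle-options mycc : $(condition) : $(command) : $(options) ;
+#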
+rule handle-options ( toolset : condition * : command * : options * )
+{
+ if $(.debug-configuration)
+ {
+ ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ;
+ }
+
+ # The last parameter ('unchecked') says it is OK to set flags for another
+ # module.
+ toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
+ : unchecked ;
+
+ toolset.flags $(toolset).compile OPTIONS $(condition) :
+ [ feature.get-values <compileflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c OPTIONS $(condition) :
+ [ feature.get-values <cflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
+ [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).link OPTIONS $(condition) :
+ [ feature.get-values <linkflags> : $(options) ] : unchecked ;
+}
+
+
+# Returns the location of the "program files" directory on a Windows platform.
+#
+rule get-program-files-dir ( )
+{
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ ProgramFiles = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ ProgramFiles = "c:\\Program Files" ;
+ }
+ return $(ProgramFiles) ;
+}
+
+
+if [ os.name ] = NT
+{
+ RM = del /f /q ;
+ CP = copy /b ;
+ IGNORE = "2>nul >nul & setlocal" ;
+ LN ?= $(CP) ;
+ # Ugly hack to convince copy to set the timestamp of the
+ # destination to the current time by concatenating the
+ # source with a nonexistent file. Note that this requires
+ # /b (binary), as the default when concatenating files is /a (ascii).
+ WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
+}
+else
+{
+ RM = rm -f ;
+ CP = cp ;
+ LN = ln ;
+}
+
+
+rule rm-command ( )
+{
+ return $(RM) ;
+}
+
+
+rule copy-command ( )
+{
+ return $(CP) ;
+}
+
+
+if "\n" = "n"
+{
+ # Escape characters are not supported. Use ugly hacks that won't work,
+ # see below.
+ nl = "
+" ;
+ q = "" ;
+}
+else
+{
+ nl = "\n" ;
+ q = "\"" ;
+}
+
+# Returns the command needed to set an environment variable on the current
+# platform. The variable setting persists through all following commands and is
+# visible in the environment seen by subsequently executed commands. In other
+# words, on Unix systems, the variable is exported, which is consistent with the
+# only possible behavior on Windows systems.
+#
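+# A hedged example (FOO and bar are hypothetical; on Unix this expands roughly
+# to FOO="bar" followed by an export, on NT to a single 'set' command):
+#
+#   local cmd = [ common.variable-setting-command FOO : bar ] ;
+#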
+rule variable-setting-command ( variable : value )
+{
+ if [ os.name ] = NT
+ {
+ return "set $(variable)=$(value)$(nl)" ;
+ }
+ else
+ {
+ # If we don't have escape characters support in bjam, the below blows
+ # up on CYGWIN, since the $(nl) variable holds a Windows new-line \r\n
+ # sequence that messes up the executed export command which then reports
+ # that the passed variable name is incorrect.
+ # But we have a check for cygwin in kernel/bootstrap.jam already.
+ return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
+ }
+}
+
+
+# Returns a command that sets a named shell path variable to the given NATIVE
+# paths on the current platform.
+#
+rule path-variable-setting-command ( variable : paths * )
+{
+ local sep = [ os.path-separator ] ;
+ return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
+}
+
+
+# Returns a command that prepends the given paths to the named path variable on
+# the current platform.
+#
+rule prepend-path-variable-command ( variable : paths * )
+{
+ return [ path-variable-setting-command $(variable)
+ : $(paths) [ os.expand-variable $(variable) ] ] ;
+}
+
+
+# Return a command which can create a file. If 'r' is the result of the
+# invocation, then 'r foobar' will create foobar with unspecified content. What
+# happens if the file already exists is unspecified.
+#
+rule file-creation-command ( )
+{
+ if [ os.name ] = NT
+ {
+ # A few alternative implementations on Windows:
+ #
+ # 'type NUL >> '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also not change
+ # the target's timestamp in case the file already exists.
+ #
+ # 'type NUL > '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also destroy an
+ # already existing file by overwriting it with an empty one.
+ #
+ # I guess the best solution would be to allow Boost Jam to define
+ # built-in functions such as 'create a file', 'touch a file' or 'copy a
+ # file' which could be used from inside action code. That would allow
+ # completely portable operations without this kind of kludge.
+ # (22.02.2009.) (Jurko)
+ return "echo. > " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+# Returns a command that may be used for 'touching' files. It is not a real
+# 'touch' command on NT because it adds an empty line at the end of the file, but it
+# works with source files.
+#
+rule file-touch-command ( )
+{
+ if [ os.name ] = NT
+ {
+ return "echo. >> " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+rule MkDir
+{
+ # If dir exists, do not update it. Do this even for $(DOT).
+ NOUPDATE $(<) ;
+
+ if $(<) != $(DOT) && ! $($(<)-mkdir)
+ {
+ # Cheesy gate to prevent multiple invocations on same dir.
+ $(<)-mkdir = true ;
+
+ # Schedule the mkdir build action.
+ common.mkdir $(<) ;
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ DEPENDS dirs : $(<) ;
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ local s = $(<:P) ;
+ if [ os.name ] = NT
+ {
+ switch $(s)
+ {
+ case *: : s = ;
+ case *:\\ : s = ;
+ }
+ }
+
+ if $(s)
+ {
+ if $(s) != $(<)
+ {
+ DEPENDS $(<) : $(s) ;
+ MkDir $(s) ;
+ }
+ else
+ {
+ NOTFILE $(s) ;
+ }
+ }
+ }
+}
+
+
+#actions MkDir1
+#{
+# mkdir "$(<)"
+#}
+
+# The following quick-fix actions should be replaced using the original MkDir1
+# action once Boost Jam gets updated to correctly detect different paths leading
+# up to the same filesystem target and triggers their build action only once.
+# (todo) (04.07.2008.) (Jurko)
+
+if [ os.name ] = NT
+{
+ actions mkdir
+ {
+ if not exist "$(<)\\" mkdir "$(<)"
+ }
+}
+else
+{
+ actions mkdir
+ {
+ mkdir -p "$(<)"
+ }
+}
+
+actions piecemeal together existing Clean
+{
+ $(RM) "$(>)"
+}
+
+
+rule copy
+{
+}
+
+
+actions copy
+{
+ $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
+}
+
+
+rule RmTemps
+{
+}
+
+
+actions quietly updated piecemeal together RmTemps
+{
+ $(RM) "$(>)" $(IGNORE)
+}
+
+
+actions hard-link
+{
+ $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
+ $(LN) "$(>)" "$(<)" $(NULL_OUT)
+}
+
+
+# Given a target, as given to a custom tag rule, returns a string formatted
+# according to the passed format. Format is a list of properties that are
+# represented in the result. For each element of the format the corresponding
+# target information is obtained and added to the result string. For all but the
+# literal, the format value is taken as the string to prepend to the output to
+# join the item to the rest of the result. If not given, "-" is used as the
+# joiner.
+#
+# The format options can be:
+#
+# <base>[joiner]
+# :: The basename of the target name.
+# <toolset>[joiner]
+# :: The abbreviated toolset tag being used to build the target.
+# <threading>[joiner]
+# :: Indication of a multi-threaded build.
+# <runtime>[joiner]
+# :: Collective tag of the build runtime.
+# <version:/version-feature | X.Y[.Z]/>[joiner]
+# :: Short version tag taken from the given "version-feature" in the
+# build properties. Or if not present, the literal value as the
+# version number.
+# <property:/property-name/>[joiner]
+# :: Direct lookup of the given property-name value in the build
+# properties. /property-name/ is a regular expression. E.g.
+# <property:toolset-.*:flavor> will match every toolset.
+# /otherwise/
+# :: The literal value of the format argument.
+#
+# For example this format:
+#
+# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+#
+# Might return:
+#
+# boost_thread-vc80-mt-gd-1_33.dll, or
+# boost_regex-vc80-gd-1_33.dll
+#
+# The returned name also has the target type specific prefix and suffix which
+# puts it in a ready form to use as the value from a custom tag rule.
+#
+rule format-name ( format * : name : type ? : property-set )
+{
+ local result = "" ;
+ for local f in $(format)
+ {
+ switch $(f:G)
+ {
+ case <base> :
+ result += $(name:B) ;
+
+ case <toolset> :
+ result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <threading> :
+ result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
+ : $(property-set) ] ] ;
+
+ case <runtime> :
+ result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <qt> :
+ result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <address-model> :
+ result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <version:*> :
+ local key = [ MATCH <version:(.*)> : $(f:G) ] ;
+ local version = [ $(property-set).get <$(key)> ] ;
+ version ?= $(key) ;
+ version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
+ result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
+
+ case <property:*> :
+ local key = [ MATCH <property:(.*)> : $(f:G) ] ;
+ local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
+ if $(p0)
+ {
+ local p = [ $(property-set).get <$(p0)> ] ;
+ if $(p)
+ {
+ result += [ join-tag $(f:G=) : $(p) ] ;
+ }
+ }
+
+ case * :
+ result += $(f:G=) ;
+ }
+ }
+ result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
+ $(property-set) ] ;
+ return $(result) ;
+}
+
+
+local rule join-tag ( joiner ? : tag ? )
+{
+ if ! $(joiner) { joiner = - ; }
+ return $(joiner)$(tag) ;
+}
+
+
+local rule toolset-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ switch [ $(property-set).get <toolset> ]
+ {
+ case borland* : tag += bcb ;
+ case clang* :
+ {
+ switch [ $(property-set).get <toolset-clang:platform> ]
+ {
+ case darwin : tag += clang-darwin ;
+ case linux : tag += clang ;
+ }
+ }
+ case como* : tag += como ;
+ case cw : tag += cw ;
+ case darwin* : tag += xgcc ;
+ case edg* : tag += edg ;
+ case gcc* :
+ {
+ switch [ $(property-set).get <toolset-gcc:flavor> ]
+ {
+ case *mingw* : tag += mgw ;
+ case * : tag += gcc ;
+ }
+ }
+ case intel :
+ if [ $(property-set).get <toolset-intel:platform> ] = win
+ {
+ tag += iw ;
+ }
+ else
+ {
+ tag += il ;
+ }
+ case kcc* : tag += kcc ;
+ case kylix* : tag += bck ;
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ case mipspro* : tag += mp ;
+ case msvc* : tag += vc ;
+ case qcc* : tag += qcc ;
+ case sun* : tag += sw ;
+ case tru64cxx* : tag += tru ;
+ case vacpp* : tag += xlc ;
+ }
+ local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)"
+ : $(properties) ] ;
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if $(tag) = vc
+ {
+ if $(version[1]) = 6
+ {
+ # Cancel minor version.
+ version = 6 ;
+ }
+ else if $(version[1]) = 7 && $(version[2]) = 0
+ {
+ version = 7 ;
+ }
+ }
+ # On Intel, the version is not added because it does not matter; it is the
+ # version of vc used as the backend that matters. Ideally, we should encode the
+ # backend version but that would break compatibility with V1.
+ if $(tag) = iw
+ {
+ version = ;
+ }
+
+ # On borland, version is not added for compatibility with V1.
+ if $(tag) = bcb
+ {
+ version = ;
+ }
+
+ tag += $(version) ;
+
+ return $(tag:J=) ;
+}
+
+
+local rule threading-tag ( name : type ? : property-set )
+{
+ local tag = ;
+ local properties = [ $(property-set).raw ] ;
+ if <threading>multi in $(properties) { tag = mt ; }
+
+ return $(tag:J=) ;
+}
+
+
+local rule runtime-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ if <runtime-link>static in $(properties) { tag += s ; }
+
+ # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect gcc
+ # toolset, the tag rules will not even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # known to care about runtime debugging.
+ if ( <toolset>msvc in $(properties) ) ||
+ ( <stdlib>stlport in $(properties) ) ||
+ ( <toolset-intel:platform>win in $(properties) )
+ {
+ if <runtime-debugging>on in $(properties) { tag += g ; }
+ }
+
+ if <python-debugging>on in $(properties) { tag += y ; }
+ if <variant>debug in $(properties) { tag += d ; }
+ if <stdlib>stlport in $(properties) { tag += p ; }
+ if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
+
+ return $(tag:J=) ;
+}
+
+# Create a tag for the Qt library version
+# "<qt>4.6.0" will result in tag "qt460"
+local rule qt-tag ( name : type ? : property-set )
+{
+ local properties = [ $(property-set).get <qt> ] ;
+ local version = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)"
+ : $(properties) ] ;
+ local tag = "qt"$(version:J=) ;
+ return $(tag) ;
+}
+
+# Create a tag for the address-model
+# <address-model>64 will simply generate "64"
+local rule address-model-tag ( name : type ? : property-set )
+{
+ local tag = ;
+ local version = [ $(property-set).get <address-model> ] ;
+ return $(version) ;
+}
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local nl = "
+" ;
+
+ local save-os = [ modules.peek os : .name ] ;
+
+ modules.poke os : .name : LINUX ;
+
+ assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)"
+ : path-variable-setting-command PATH : foo bar baz ;
+
+ assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : NT ;
+
+ assert.result "set PATH=foo;bar;baz$(nl)"
+ : path-variable-setting-command PATH : foo bar baz ;
+
+ assert.result "set PATH=foo;bar;%PATH%$(nl)"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : $(save-os) ;
+}
diff --git a/jam-files/boost-build/tools/como-linux.jam b/jam-files/boost-build/tools/como-linux.jam
new file mode 100644
index 000000000..5c554c8f8
--- /dev/null
+++ b/jam-files/boost-build/tools/como-linux.jam
@@ -0,0 +1,103 @@
+# Copyright 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include a version
+# number.
+#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+import common ;
+import generators ;
+
+import unix ;
+import como ;
+
+feature.extend-subfeature toolset como : platform : linux ;
+
+toolset.inherit-generators como-linux
+ <toolset>como <toolset-como:platform>linux : unix ;
+generators.override como-linux.prebuilt : builtin.lib-generator ;
+generators.override como-linux.searched-lib-generator : searched-lib-generator ;
+toolset.inherit-flags como-linux : unix ;
+toolset.inherit-rules como-linux : gcc ;
+
+generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ
+ : <toolset>como <toolset-como:platform>linux ;
+generators.register-c-compiler como-linux.compile.c : C : OBJ
+ : <toolset>como <toolset-como:platform>linux ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters como-linux
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command como-linux : como
+ : $(command) ] ;
+
+ common.handle-options como-linux : $(condition) : $(command) : $(options) ;
+}
+
+
+flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ;
+flags como-linux C++FLAGS <exception-handling>on : --exceptions ;
+
+flags como-linux CFLAGS <inlining>off : --no_inlining ;
+flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ;
+
+flags como-linux CFLAGS <optimization>off : -O0 ;
+flags como-linux CFLAGS <optimization>speed : -O3 ;
+flags como-linux CFLAGS <optimization>space : -Os ;
+
+flags como-linux CFLAGS <debug-symbols>on : -g ;
+flags como-linux LINKFLAGS <debug-symbols>on : -g ;
+
+flags como-linux FINDLIBS : m ;
+flags como-linux FINDLIBS : rt ;
+
+flags como-linux CFLAGS <cflags> ;
+flags como-linux C++FLAGS <cxxflags> ;
+flags como-linux DEFINES <define> ;
+flags como-linux UNDEFS <undef> ;
+flags como-linux HDRS <include> ;
+flags como-linux STDHDRS <sysinclude> ;
+flags como-linux LINKFLAGS <linkflags> ;
+flags como-linux ARFLAGS <arflags> ;
+
+flags como-linux.link LIBRARIES <library-file> ;
+flags como-linux.link LINKPATH <library-path> ;
+flags como-linux.link FINDLIBS-ST <find-static-library> ;
+flags como-linux.link FINDLIBS-SA <find-shared-library> ;
+
+flags como-linux.link RPATH <dll-path> ;
+flags como-linux.link RPATH_LINK <xdll-path> ;
+
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
+}
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
+}
+
+actions compile.c
+{
+ $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
+}
+
+actions archive
+{
+ ar rcu $(<) $(>)
+}
diff --git a/jam-files/boost-build/tools/como-win.jam b/jam-files/boost-build/tools/como-win.jam
new file mode 100644
index 000000000..d21a70d6f
--- /dev/null
+++ b/jam-files/boost-build/tools/como-win.jam
@@ -0,0 +1,117 @@
+# (C) Copyright David Abrahams 2001.
+# (C) Copyright MetaCommunications, Inc. 2004.
+
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include a version
+# number.
+#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
+
+import common ;
+import como ;
+import feature ;
+import generators ;
+import toolset : flags ;
+
+feature.extend-subfeature toolset como : platform : win ;
+
+
+# Initializes the Comeau toolset for Windows. The command is the command which
+# invokes the compiler. You should either set the environment variable
+# COMO_XXX_INCLUDE, where XXX is the used backend (as described in the
+# documentation), or pass that as part of the command, e.g.:
+#
+# using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters como-win
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command como-win : como.exe :
+ $(command) ] ;
+
+ common.handle-options como-win : $(condition) : $(command) : $(options) ;
+}
+
+generators.register-c-compiler como-win.compile.c++ : CPP : OBJ
+ : <toolset>como <toolset-como:platform>win ;
+generators.register-c-compiler como-win.compile.c : C : OBJ
+ : <toolset>como <toolset-como:platform>win ;
+
+
+generators.register-linker como-win.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>como <toolset-como:platform>win ;
+
+# Note that status of shared libraries support is not clear, so we do not define
+# the link.dll generator.
+generators.register-archiver como-win.archive
+ : OBJ : STATIC_LIB
+ : <toolset>como <toolset-como:platform>win ;
+
+
+flags como-win C++FLAGS <exception-handling>off : --no_exceptions ;
+flags como-win C++FLAGS <exception-handling>on : --exceptions ;
+
+flags como-win CFLAGS <inlining>off : --no_inlining ;
+flags como-win CFLAGS <inlining>on <inlining>full : --inlining ;
+
+
+# The following seem to be VC-specific options. At least, when I uncomment
+# them, Comeau with bcc as the backend reports that the bcc32 invocation failed.
+#
+#flags como-win CFLAGS <debug-symbols>on : /Zi ;
+#flags como-win CFLAGS <optimization>off : /Od ;
+
+
+flags como-win CFLAGS <cflags> ;
+flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header.
+flags como-win CFLAGS <threading>multi : -D_MT ; # Make sure that our config knows that threading is on.
+flags como-win C++FLAGS <cxxflags> ;
+flags como-win DEFINES <define> ;
+flags como-win UNDEFS <undef> ;
+flags como-win HDRS <include> ;
+flags como-win SYSHDRS <sysinclude> ;
+flags como-win LINKFLAGS <linkflags> ;
+flags como-win ARFLAGS <arflags> ;
+flags como-win NO_WARN <no-warn> ;
+
+#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ;
+#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ;
+
+flags como-win LIBPATH <library-path> ;
+flags como-win LIBRARIES <library-file> ;
+flags como-win FINDLIBS <find-shared-library> ;
+flags como-win FINDLIBS <find-static-library> ;
+
+nl = "
+" ;
+
+
+# For como, we repeat all libraries so that dependencies are always resolved.
+#
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)"
+}
+
+actions compile.c
+{
+ $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)"
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)"
+}
+
+actions archive
+{
+ $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
+ lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
+}
diff --git a/jam-files/boost-build/tools/como.jam b/jam-files/boost-build/tools/como.jam
new file mode 100644
index 000000000..04a05a94b
--- /dev/null
+++ b/jam-files/boost-build/tools/como.jam
@@ -0,0 +1,29 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'como' toolset. Depending on the current system, it
+# forwards to either the 'como-linux' or the 'como-win' module.
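+#
+# A minimal user-config.jam sketch (the version and command shown are
+# hypothetical); the arguments are forwarded unchanged to the platform module:
+#
+#     using como : 4.3 : como.exe ;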
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : como ;
+feature.subfeature toolset como : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = LINUX
+ {
+ toolset.using como-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using como-win :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+
+ }
+}
diff --git a/jam-files/boost-build/tools/convert.jam b/jam-files/boost-build/tools/convert.jam
new file mode 100644
index 000000000..ac1d70101
--- /dev/null
+++ b/jam-files/boost-build/tools/convert.jam
@@ -0,0 +1,62 @@
+# Copyright (c) 2009 Vladimir Prus
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements the 'convert' target, which takes a bunch of sources and
+# tries to convert each one to the specified type.
+#
+# For example:
+#
+# convert objects obj : a.cpp b.cpp ;
+#
+
+import targets ;
+import generators ;
+import project ;
+import type ;
+import "class" : new ;
+
+class convert-target-class : typed-target
+{
+ rule __init__ ( name : project : type
+ : sources * : requirements * : default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type)
+ : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) : $(self.type)
+ : [ property-set.create [ $(property-set).raw ] # [ feature.expand
+ <main-target-type>$(self.type) ]
+ # ]
+ : $(source-targets) ] ;
+ if ! $(r)
+ {
+ errors.error "unable to construct" [ full-name ] ;
+ }
+
+ return $(r) ;
+ }
+
+}
+
+rule convert ( name type : sources * : requirements * : default-build *
+ : usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ # This is a circular module dependency, so it must be imported here
+ modules.import targets ;
+ targets.main-target-alternative
+ [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ]
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
+}
+IMPORT $(__name__) : convert : : convert ;
diff --git a/jam-files/boost-build/tools/cw-config.jam b/jam-files/boost-build/tools/cw-config.jam
new file mode 100644
index 000000000..1211b7c04
--- /dev/null
+++ b/jam-files/boost-build/tools/cw-config.jam
@@ -0,0 +1,34 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for CodeWarrior toolset. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ for local R in 9 8 7
+ {
+ local cw-path = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
+ : "PATH" ] ;
+ local cw-version = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
+ : "VERSION" ] ;
+ cw-path ?= [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0"
+ : "PATH" ] ;
+ cw-version ?= $(R).0 ;
+
+ if $(cw-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
+ }
+ using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/tools/cw.jam b/jam-files/boost-build/tools/cw.jam
new file mode 100644
index 000000000..ddcbfeb2b
--- /dev/null
+++ b/jam-files/boost-build/tools/cw.jam
@@ -0,0 +1,246 @@
+# Copyright (C) Reece H Dunn 2004
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# based on the msvc.jam toolset
+
+import property ;
+import generators ;
+import os ;
+import type ;
+import toolset : flags ;
+import errors : error ;
+import feature : feature get-values ;
+import path ;
+import sequence : unique ;
+import common ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : cw ;
+
+toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ;
+
+nl = "
+" ;
+
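+# Initializes the CodeWarrior toolset. A minimal user-config.jam sketch (the
+# version and install root shown are hypothetical):
+#
+#     using cw : 9.4 : : <root>"C:/Program Files/Metrowerks/CodeWarrior" ;
+#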
+rule init ( version ? : command * : options * )
+{
+ # TODO: fix the $(command[1]) = $(compiler) issue
+
+ setup = [ get-values <setup> : $(options) ] ;
+ setup ?= cwenv.bat ;
+ compiler = [ get-values <compiler> : $(options) ] ;
+ compiler ?= mwcc ;
+ linker = [ get-values <linker> : $(options) ] ;
+ linker ?= mwld ;
+
+ local condition = [ common.check-init-parameters cw :
+ version $(version) ] ;
+
+ command = [ common.get-invocation-command cw : mwcc.exe : $(command) :
+ [ default-paths $(version) ] ] ;
+
+ common.handle-options cw : $(condition) : $(command) : $(options) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ local tool-root = $(command) ;
+
+ setup = $(tool-root)\\$(setup) ;
+
+ # map the batch file in setup so it can be executed
+
+ other-tools = $(tool-root:D) ;
+ root ?= $(other-tools:D) ;
+
+ flags cw.link RUN_PATH $(condition) :
+ "$(root)\\Win32-x86 Support\\Libraries\\Runtime"
+ "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ;
+
+ setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ;
+
+ if [ os.name ] = NT
+ {
+ setup = $(setup)"
+" ;
+ }
+ else
+ {
+ setup = "cmd /S /C "$(setup)" \"&&\" " ;
+ }
+
+ # bind the setup command to the tool so it can be executed before the
+ # command
+
+ local prefix = $(setup) ;
+
+ flags cw.compile .CC $(condition) : $(prefix)$(compiler) ;
+ flags cw.link .LD $(condition) : $(prefix)$(linker) ;
+ flags cw.archive .LD $(condition) : $(prefix)$(linker) ;
+
+ if [ MATCH ^([89]\\.) : $(version) ]
+ {
+ if [ os.name ] = NT
+ {
+ # The runtime libraries
+ flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ;
+ flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ;
+
+ flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ;
+ flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ;
+
+ flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ;
+ flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ;
+ }
+ }
+}
+
+
+local rule default-paths ( version ? ) # FIXME
+{
+ local possible-paths ;
+ local ProgramFiles = [ common.get-program-files-dir ] ;
+
+ # TODO: add support for cw8 and cw9 detection
+
+ local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ;
+ possible-paths += $(version-6-path) ;
+
+ # perform post-processing
+
+ possible-paths
+ = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ;
+
+ possible-paths += [ modules.peek : PATH Path path ] ;
+
+ return $(possible-paths) ;
+}
+
+
+
+
+## declare generators
+
+generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ;
+generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ;
+
+generators.register-linker cw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>cw
+ ;
+generators.register-linker cw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : SHARED_LIB IMPORT_LIB
+ : <toolset>cw
+ ;
+
+generators.register-archiver cw.archive
+ : OBJ
+ : STATIC_LIB
+ : <toolset>cw
+ ;
+
+## compilation phase
+
+flags cw WHATEVER <toolset-cw:version> ;
+
+flags cw.compile CFLAGS <debug-symbols>on : -g ;
+flags cw.compile CFLAGS <optimization>off : -O0 ;
+flags cw.compile CFLAGS <optimization>speed : -O4,p ;
+flags cw.compile CFLAGS <optimization>space : -O4,s ;
+flags cw.compile CFLAGS <inlining>off : -inline off ;
+flags cw.compile CFLAGS <inlining>on : -inline on ;
+flags cw.compile CFLAGS <inlining>full : -inline all ;
+flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ;
+
+
+flags cw.compile CFLAGS <rtti>on : -RTTI on ;
+flags cw.compile CFLAGS <rtti>off : -RTTI off ;
+
+flags cw.compile CFLAGS <warnings>on : -w on ;
+flags cw.compile CFLAGS <warnings>off : -w off ;
+flags cw.compile CFLAGS <warnings>all : -w all ;
+flags cw.compile CFLAGS <warnings-as-errors>on : -w error ;
+
+flags cw.compile USER_CFLAGS <cflags> : ;
+flags cw.compile.c++ USER_CFLAGS <cxxflags> : ;
+
+flags cw.compile DEFINES <define> ;
+flags cw.compile UNDEFS <undef> ;
+flags cw.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
+}
+actions compile.c++
+{
+ $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
+}
+
+## linking phase
+
+flags cw.link DEF_FILE <def-file> ;
+
+flags cw LINKFLAGS : -search ;
+flags cw LINKFLAGS <debug-symbols>on : -g ;
+flags cw LINKFLAGS <user-interface>console : -subsystem console ;
+flags cw LINKFLAGS <user-interface>gui : -subsystem windows ;
+flags cw LINKFLAGS <user-interface>wince : -subsystem wince ;
+flags cw LINKFLAGS <user-interface>native : -subsystem native ;
+flags cw LINKFLAGS <user-interface>auto : -subsystem auto ;
+
+flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ;
+
+flags cw.link USER_LINKFLAGS <linkflags> ;
+flags cw.link LINKPATH <library-path> ;
+
+flags cw.link FINDLIBS_ST <find-static-library> ;
+flags cw.link FINDLIBS_SA <find-shared-library> ;
+flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ;
+flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+}
+
+if [ os.name ] in NT
+{
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+ }
+}
+else # cygwin
+{
+ actions archive
+ {
+ _bbv2_out_="$(<)"
+ if test -f "$_bbv2_out_" ; then
+ _bbv2_existing_="$(<:W)"
+ fi
+ $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+ }
+}
+
+actions link bind DEF_FILE
+{
+ $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+actions link.dll bind DEF_FILE
+{
+ $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
diff --git a/jam-files/boost-build/tools/darwin.jam b/jam-files/boost-build/tools/darwin.jam
new file mode 100644
index 000000000..283dface9
--- /dev/null
+++ b/jam-files/boost-build/tools/darwin.jam
@@ -0,0 +1,568 @@
+# Copyright 2003 Christopher Currie
+# Copyright 2006 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2005-2007 Mat Marcus
+# Copyright 2005-2007 Adobe Systems Incorporated
+# Copyright 2007-2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
+# for an explanation of why it is a separate toolset.
+
+import feature : feature ;
+import toolset : flags ;
+import type ;
+import common ;
+import generators ;
+import path : basename ;
+import version ;
+import property-set ;
+import regex ;
+import errors ;
+
+## Use a framework.
+feature framework : : free ;
+
+## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
+feature macosx-version : : propagated link-incompatible symmetric optional ;
+
+## The minimal MacOSX version to target.
+feature macosx-version-min : : propagated optional ;
+
+## A dependency that is forced to be included in the link.
+feature force-load : : free dependency incidental ;
+
+#############################################################################
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : darwin ;
+import gcc ;
+toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+
+generators.override darwin.prebuilt : builtin.prebuilt ;
+generators.override darwin.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
+generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>darwin : gch ;
+
+toolset.inherit-rules darwin : gcc : localize ;
+toolset.inherit-flags darwin : gcc
+ : <runtime-link>static
+ <architecture>arm/<address-model>32
+ <architecture>arm/<address-model>64
+ <architecture>arm/<instruction-set>
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ <architecture>x86/<instruction-set>
+ <architecture>power/<address-model>32
+ <architecture>power/<address-model>64
+ <architecture>power/<instruction-set> ;
+
+# Options:
+#
+# <root>PATH
+# Platform root path. The common autodetection will set this to
+# "/Developer". And when a command is given it will be set to
+# the corresponding "*.platform/Developer" directory.
+#
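+# A minimal user-config.jam sketch (the version, command and root shown below
+# are hypothetical):
+#
+#     using darwin : 4.2 : /usr/bin/g++-4.2 : <root>/Developer ;
+#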
+rule init ( version ? : command * : options * : requirement * )
+{
+    # First time around, figure out what the host OSX version is.
+ if ! $(.host-osx-version)
+ {
+ .host-osx-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: OSX version on this machine is $(.host-osx-version) ;
+ }
+ }
+
+ # - The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+
+ # - The bin directory where to find the commands to execute.
+ local bin ;
+
+ # - The configured compile driver command.
+ local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;
+
+ # The version as reported by the compiler
+ local real-version ;
+
+ # - Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
+ if $(bin) = "/usr/bin"
+ {
+ root ?= /Developer ;
+ }
+ else
+ {
+ local r = $(bin:D) ;
+ r = $(r:D) ;
+ root ?= $(r) ;
+ }
+ }
+
+ # - Autodetect the version if not given.
+ if $(command)
+ {
+ # - The 'command' variable can have multiple elements. When calling
+ # the SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ real-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ version ?= $(real-version) ;
+ }
+
+ .real-version.$(version) = $(real-version) ;
+
+ # - Define the condition for this toolset instance.
+ local condition =
+ [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;
+
+ # - Set the toolset generic common options.
+ common.handle-options darwin : $(condition) : $(command) : $(options) ;
+
+ # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates.
+ if $(real-version) < "4.0.0"
+ {
+ flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
+ }
+ # - GCC 4.2 and higher in Darwin does not have -Wno-long-double.
+ if $(real-version) < "4.2.0"
+ {
+ flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
+ }
+
+ # - Set the link flags common with the GCC toolset.
+ gcc.init-link-flags darwin darwin $(condition) ;
+
+ # - The symbol strip program.
+ local strip ;
+ if <striper> in $(options)
+ {
+        # We can turn off strip by specifying it as empty, in which
+        # case we switch to using the linker to do the strip.
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
+ }
+ else
+ {
+        # Otherwise we need to find a strip program to use, and hence
+        # also tell the link action that we need to use a strip
+        # post-process.
+ flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
+ strip =
+ [ common.get-invocation-command darwin
+ : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.link .STRIP $(condition) : $(strip[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using strip for $(condition) at $(strip[1]) ;
+ }
+ }
+
+    # - The archive builder (libtool is the default, as creating
+    #   archives in darwin is complicated).
+ local archiver =
+ [ common.get-invocation-command darwin
+ : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using archiver for $(condition) at $(archiver[1]) ;
+ }
+
+ # - Initialize the SDKs available in the root for this tool.
+ local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;
+
+ #~ ECHO --- ;
+ #~ ECHO --- bin :: $(bin) ;
+ #~ ECHO --- root :: $(root) ;
+ #~ ECHO --- version :: $(version) ;
+ #~ ECHO --- condition :: $(condition) ;
+ #~ ECHO --- strip :: $(strip) ;
+ #~ ECHO --- archiver :: $(archiver) ;
+ #~ ECHO --- sdks :: $(sdks) ;
+ #~ ECHO --- ;
+ #~ EXIT ;
+}
+
+# Add and set options for a discovered SDK version.
+local rule init-sdk ( condition * : root ? : version + : version-feature ? )
+{
+ local rule version-to-feature ( version + )
+ {
+ switch $(version[1])
+ {
+ case iphone* :
+ {
+ return $(version[1])-$(version[2-]:J=.) ;
+ }
+ case mac* :
+ {
+ return $(version[2-]:J=.) ;
+ }
+ case * :
+ {
+ return $(version:J=.) ;
+ }
+ }
+ }
+
+ if $(version-feature)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ;
+ }
+
+ # Add the version to the features for specifying them.
+ if ! $(version-feature) in [ feature.values macosx-version ]
+ {
+ feature.extend macosx-version : $(version-feature) ;
+ }
+ if ! $(version-feature) in [ feature.values macosx-version-min ]
+ {
+ feature.extend macosx-version-min : $(version-feature) ;
+ }
+
+ # Set the flags the version needs to compile with, first
+ # generic options.
+ flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(sdk) ;
+ flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(sdk) ;
+
+ # Then device variation options.
+ switch $(version[1])
+ {
+ case iphonesim* :
+ {
+ local N = $(version[2]) ;
+ if ! $(version[3]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
+ else { N += 0$(version[3]) ; }
+ if ! $(version[4]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
+ else { N += 0$(version[4]) ; }
+ N = $(N:J=) ;
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ }
+
+ case iphone* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ }
+
+ case mac* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ }
+ }
+
+ return $(version-feature) ;
+ }
+ else if $(version[4])
+ {
+        # We have a patch version of an SDK. We want to set up both the
+        # specific patch version and the minor version, so we recurse to
+        # set up the minor version, and then also the full patch version.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+ else
+ {
+ # Yes, this is intentionally recursive.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+}
+
+# Determine the MacOSX SDK versions installed and their locations.
+local rule init-available-sdk-versions ( condition * : root ? )
+{
+ root ?= /Developer ;
+ local sdks-root = $(root)/SDKs ;
+ local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
+ local result ;
+ for local sdk in $(sdks)
+ {
+ local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ;
+ local sdk-platform = $(sdk-match[1]:L) ;
+ local sdk-version = $(sdk-match[2-]) ;
+ if $(sdk-version)
+ {
+ switch $(sdk-platform)
+ {
+ case macosx :
+ {
+ sdk-version = mac $(sdk-version) ;
+ }
+ case iphoneos :
+ {
+ sdk-version = iphone $(sdk-version) ;
+ }
+ case iphonesimulator :
+ {
+ sdk-version = iphonesim $(sdk-version) ;
+ }
+ case * :
+ {
+ sdk-version = $(sdk-version:J=-) ;
+ }
+ }
+ result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+# Generic options.
+flags darwin.compile OPTIONS <flags> ;
+
+# The following adds objective-c support to darwin.
+# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
+
+generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
+generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local ps = [ property-set.create $(properties) ] ;
+ local arch = [ $(ps).get <architecture> ] ;
+ local address-model = [ $(ps).get <address-model> ] ;
+ local osx-version = [ $(ps).get <macosx-version> ] ;
+ local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
+ gcc-version = $(.real-version.$(gcc-version)) ;
+ local options ;
+
+ local support-ppc64 = 1 ;
+
+ osx-version ?= $(.host-osx-version) ;
+
+ switch $(osx-version)
+ {
+ case iphone* :
+ {
+ support-ppc64 = ;
+ }
+
+ case * :
+ if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
+ {
+ # When targeting 10.6:
+            # - gcc 4.2 will give a compiler error if ppc64 compilation is requested
+ # - gcc 4.0 will compile fine, somehow, but then fail at link time
+ support-ppc64 = ;
+ }
+ }
+ switch $(arch)
+ {
+ case combined :
+ {
+ if $(address-model) = 32_64 {
+ if $(support-ppc64) {
+ options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
+ } else {
+ # Build 3-way binary
+ options = -arch i386 -arch ppc -arch x86_64 ;
+ }
+ } else if $(address-model) = 64 {
+ if $(support-ppc64) {
+ options = -arch x86_64 -arch ppc64 ;
+ } else {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+ } else {
+ options = -arch i386 -arch ppc ;
+ }
+ }
+
+ case x86 :
+ {
+ if $(address-model) = 32_64 {
+ options = -arch i386 -arch x86_64 ;
+ } else if $(address-model) = 64 {
+ options = -arch x86_64 ;
+ } else {
+ options = -arch i386 ;
+ }
+ }
+
+ case power :
+ {
+ if ! $(support-ppc64)
+ && ( $(address-model) = 32_64 || $(address-model) = 64 )
+ {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+
+ if $(address-model) = 32_64 {
+ options = -arch ppc -arch ppc64 ;
+ } else if $(address-model) = 64 {
+ options = -arch ppc64 ;
+ } else {
+ options = -arch ppc ;
+ }
+ }
+
+ case arm :
+ {
+ options = -arch armv6 ;
+ }
+ }
+
+ if $(options)
+ {
+ OPTIONS on $(targets) += $(options) ;
+ }
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+}
+
+rule setup-fpic ( targets * : sources * : properties * )
+{
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.m ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c" ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.m
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.mm ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c++" ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.mm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Set the max header padding to allow renaming of libs for installation.
+flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
+
+# To link the static runtime we need to link to all the core runtime libraries.
+flags darwin.link OPTIONS <runtime-link>static
+ : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
+
+# Strip as much as possible when optimizing.
+flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+
+# Dynamic/shared linking.
+flags darwin.compile OPTIONS <link>shared : -dynamic ;
+
+# Misc options.
+flags darwin.compile OPTIONS : -no-cpp-precomp -gdwarf-2 -fexceptions ;
+#~ flags darwin.link OPTIONS : -fexceptions ;
+
+# Add the framework names to use.
+flags darwin.link FRAMEWORK <framework> ;
+
+#
+flags darwin.link FORCE_LOAD <force-load> ;
+
+# This flag is useful for debugging the link step;
+# uncomment it to see what libtool is doing under the hood.
+#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
+
+_ = " " ;
+
+# set up the -F option to include the paths to any frameworks used.
+local rule prepare-framework-path ( target + )
+{
+    # The -framework option only takes the basename of the framework.
+    # The -F option specifies the directories where a framework
+    # is searched for. So, if we find a <framework> feature
+    # with some path, we need to generate the proper -F option.
+ local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ;
+
+ # Be sure to generate no -F if there's no path.
+ for local framework-path in $(framework-paths)
+ {
+ if $(framework-path) != ""
+ {
+ FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
+ }
+ }
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+# Note that using strip without any options was reported to result in broken
+# binaries, at least on OS X 10.5.5, see:
+# http://svn.boost.org/trac/boost/ticket/2347
+# So we pass -S -x.
+actions link bind LIBRARIES FORCE_LOAD
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+ $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+}
+
+# We use libtool instead of ar to support universal binary linking
+# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
+actions piecemeal archive
+{
+ "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
+}
diff --git a/jam-files/boost-build/tools/dmc.jam b/jam-files/boost-build/tools/dmc.jam
new file mode 100644
index 000000000..8af8725a8
--- /dev/null
+++ b/jam-files/boost-build/tools/dmc.jam
@@ -0,0 +1,134 @@
+# Digital Mars C++
+
+# (C) Copyright Christof Meerwald 2003.
+# (C) Copyright Aleksey Gurtovoy 2004.
+# (C) Copyright Arjan Knepper 2006.
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include version number.
+#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a>
+
+import feature generators common ;
+import toolset : flags ;
+import sequence regex ;
+
+feature.extend toolset : dmc ;
+
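+# A minimal user-config.jam sketch (the version and command shown are
+# hypothetical):
+#
+#     using dmc : 8.50 : "C:/dm/bin/dmc.exe" ;
+#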
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters dmc : version $(version) ] ;
+
+ local command = [ common.get-invocation-command dmc : dmc : $(command) ] ;
+ command ?= dmc ;
+
+ common.handle-options dmc : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ if $(root)
+ {
+        # The DMC linker is sensitive to the direction of slashes, and
+        # won't link if forward slashes are used in the command.
+ root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ;
+ flags dmc .root $(condition) : $(root)\\bin\\ ;
+ }
+ else
+ {
+ flags dmc .root $(condition) : "" ;
+ }
+}
+
+
+# Declare generators
+generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ;
+generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ;
+
+generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ;
+generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ;
+generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ;
+
+
+# Declare flags
+# The dmc optlink linker has some limitations on the amount of debug info included, therefore only line numbers are enabled in debug builds.
+# flags dmc.compile OPTIONS <debug-symbols>on : -g ;
+flags dmc.compile OPTIONS <debug-symbols>on : -gl ;
+flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ;
+flags dmc.link OPTIONS <debug-symbols>off : /PACKF ;
+
+flags dmc.compile OPTIONS <optimization>off : -S -o+none ;
+flags dmc.compile OPTIONS <optimization>speed : -o+time ;
+flags dmc.compile OPTIONS <optimization>space : -o+space ;
+flags dmc.compile OPTIONS <exception-handling>on : -Ae ;
+flags dmc.compile OPTIONS <rtti>on : -Ar ;
+# FIXME:
+# When compiling sources to be linked into a shared lib (dll), the -WD cflag should be used.
+# When compiling sources to be linked into a static lib (lib) or an executable, the -WA cflag should be used.
+# But for some reason the -WD cflag is always in use.
+# flags dmc.compile OPTIONS <link>shared : -WD ;
+# flags dmc.compile OPTIONS <link>static : -WA ;
+
+# Note that these two options actually imply multithreading support on DMC
+# because there is no single-threaded dynamic runtime library. Specifying
+# <threading>multi would be a bad idea, though, because no option would be
+# matched when the build uses the default settings of <runtime-link>dynamic
+# and <threading>single.
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ;
+
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ;
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ;
+
+flags dmc.compile OPTIONS : <cflags> ;
+flags dmc.compile.c++ OPTIONS : <cxxflags> ;
+
+flags dmc.compile DEFINES : <define> ;
+flags dmc.compile INCLUDES : <include> ;
+
+flags dmc.link <linkflags> ;
+flags dmc.archive OPTIONS <arflags> ;
+
+flags dmc LIBPATH <library-path> ;
+flags dmc LIBRARIES <library-file> ;
+flags dmc FINDLIBS <find-library-sa> ;
+flags dmc FINDLIBS <find-library-st> ;
+
+actions together link bind LIBRARIES
+{
+ "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
+}
+
+actions together link.dll bind LIBRARIES
+{
+ echo LIBRARY "$(<[1])" > $(<[2]:B).def
+ echo DESCRIPTION 'A Library' >> $(<[2]:B).def
+ echo EXETYPE NT >> $(<[2]:B).def
+ echo SUBSYSTEM WINDOWS >> $(<[2]:B).def
+ echo CODE EXECUTE READ >> $(<[2]:B).def
+ echo DATA READ WRITE >> $(<[2]:B).def
+ "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
+}
+
+actions compile.c
+{
+ "$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
+}
+
+actions together piecemeal archive
+{
+ "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/docutils.jam b/jam-files/boost-build/tools/docutils.jam
new file mode 100644
index 000000000..bf0616174
--- /dev/null
+++ b/jam-files/boost-build/tools/docutils.jam
@@ -0,0 +1,84 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for docutils ReStructuredText processing.
+
+import type ;
+import scanner ;
+import generators ;
+import os ;
+import common ;
+import toolset ;
+import path ;
+import feature : feature ;
+import property ;
+
+.initialized = ;
+
+type.register ReST : rst ;
+
+class rst-scanner : common-scanner
+{
+ rule __init__ ( paths * )
+ {
+ common-scanner.__init__ . $(paths) ;
+ }
+
+ rule pattern ( )
+ {
+ return "^[ ]*\\.\\.[ ]+include::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+image::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+figure::[ ]+([^
+]+)"
+ ;
+ }
+}
+
+scanner.register rst-scanner : include ;
+type.set-scanner ReST : rst-scanner ;
+
+generators.register-standard docutils.html : ReST : HTML ;
+
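+# Initializes the docutils module. A minimal user-config.jam sketch (the
+# docutils checkout path shown is hypothetical); if no path is given, the
+# DOCUTILS_DIR variable is consulted:
+#
+#     using docutils : /usr/share/docutils ;
+#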
+rule init ( docutils-dir ? : tools-dir ? )
+{
+ docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
+ tools-dir ?= $(docutils-dir)/tools ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ .docutils-dir = $(docutils-dir) ;
+ .tools-dir = $(tools-dir:R="") ;
+
+ .setup = [
+ common.prepend-path-variable-command PYTHONPATH
+ : $(.docutils-dir) $(.docutils-dir)/extras ] ;
+ }
+}
+
+rule html ( target : source : properties * )
+{
+ if ! [ on $(target) return $(RST2XXX) ]
+ {
+ local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
+ RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
+ }
+}
+
+
+feature docutils : : free ;
+feature docutils-html : : free ;
+feature docutils-cmd : : free ;
+toolset.flags docutils COMMON-FLAGS : <docutils> ;
+toolset.flags docutils HTML-FLAGS : <docutils-html> ;
+toolset.flags docutils RST2XXX : <docutils-cmd> ;
+
+actions html
+{
+ $(.setup)
+ "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
+}
+
diff --git a/jam-files/boost-build/tools/doxygen-config.jam b/jam-files/boost-build/tools/doxygen-config.jam
new file mode 100644
index 000000000..2cd2ccaeb
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen-config.jam
@@ -0,0 +1,11 @@
+#~ Copyright 2005, 2006 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for Doxygen tools. To use, just import this module.
+
+import toolset : using ;
+
+ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ;
+
+using doxygen ;
diff --git a/jam-files/boost-build/tools/doxygen.jam b/jam-files/boost-build/tools/doxygen.jam
new file mode 100644
index 000000000..8394848dd
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen.jam
@@ -0,0 +1,776 @@
+# Copyright 2003, 2004 Douglas Gregor
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Copyright 2006 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of various outputs from source
+# files documented with doxygen comments. The supported transformations are:
+#
+# * Source -> Doxygen XML -> BoostBook XML
+# * Source -> Doxygen HTML
+#
+# The type of transformation is selected based on the target requested. For
+# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an
+# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen
+# HTML specifying a target with an ".html" suffix will produce a directory
+# <target> with the Doxygen html files, and a <target>.html file redirecting to
+# that directory.
+
+import "class" : new ;
+import targets ;
+import feature ;
+import property ;
+import generators ;
+import boostbook ;
+import type ;
+import path ;
+import print ;
+import regex ;
+import stage ;
+import project ;
+import xsltproc ;
+import make ;
+import os ;
+import toolset : flags ;
+import alias ;
+import common ;
+import modules ;
+import project ;
+import utility ;
+import errors ;
+
+
+# Used to specify extra configuration parameters. These get translated
+# into a doxyfile that configures the building of the docs.
+feature.feature doxygen:param : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
+feature.feature prefix : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
+feature.feature reftitle : : free ;
+
+# Which processor to use for various translations from Doxygen.
+feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
+
+# Whether or not to generate index sections.
+feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
+
+# The ID for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.id : : free ;
+
+# The title for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.title : : free ;
+
+# Location for images when generating XML
+feature.feature doxygen:xml-imagedir : : free ;
+
+# Indicates whether the entire directory should be deleted
+feature.feature doxygen.rmdir : off on : optional incidental ;
+
+# Doxygen configuration input file.
+type.register DOXYFILE : doxyfile ;
+
+# Doxygen XML multi-file output.
+type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
+
+# Doxygen XML coalesced output.
+type.register DOXYGEN_XML : doxygen : XML ;
+
+# Doxygen HTML multifile directory.
+type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
+
+# Redirection HTML file to HTML multifile directory.
+type.register DOXYGEN_HTML : : HTML ;
+
+type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
+
+# Initialize the Doxygen module. Parameters are:
+# name: the name of the 'doxygen' executable. If not specified, the name
+# 'doxygen' will be used
+#
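+# A minimal user-config.jam sketch (the executable path shown is hypothetical):
+#
+#     using doxygen : /usr/local/bin/doxygen ;
+#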
+rule init ( name ? )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ .doxproc = [ modules.binding $(__name__) ] ;
+ .doxproc = $(.doxproc:D)/doxproc.py ;
+
+ generators.register-composing doxygen.headers-to-doxyfile
+ : H HPP CPP : DOXYFILE ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_XML_MULTIFILE ;
+ generators.register-standard doxygen.xml-dir-to-boostbook
+ : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
+ generators.register-standard doxygen.xml-to-boostbook
+ : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
+ generators.register-standard doxygen.collect
+ : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
+ generators.register-standard doxygen.html-redirect
+ : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
+ generators.register-standard doxygen.copy-latex-pngs
+ : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
+
+ IMPORT $(__name__) : doxygen : : doxygen ;
+ }
+
+ if $(name)
+ {
+ modify-config ;
+ .doxygen = $(name) ;
+ check-doxygen ;
+ }
+
+ if ! $(.doxygen)
+ {
+ check-doxygen ;
+ }
+}
+
+rule freeze-config ( )
+{
+ if ! $(.initialized)
+ {
+ errors.user-error "doxygen must be initialized before it can be used." ;
+ }
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+
+ if [ .is-cygwin ]
+ {
+ .is-cygwin = true ;
+ }
+ }
+}
+
+rule modify-config ( )
+{
+ if $(.config-frozen)
+ {
+ errors.user-error "Cannot change doxygen after it has been used." ;
+ }
+}
+
+rule check-doxygen ( )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using doxygen ":" $(.doxygen) ;
+ }
+ local extra-paths ;
+ if [ os.name ] = NT
+ {
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ extra-paths = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ extra-paths = "C:\\Program Files" ;
+ }
+ }
+ .doxygen = [ common.get-invocation-command doxygen :
+ doxygen : $(.doxygen) : $(extra-paths) ] ;
+}
+
+rule name ( )
+{
+ freeze-config ;
+ return $(.doxygen) ;
+}
+
+rule .is-cygwin ( )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native
+ [ path.join [ path.parent $(file) ] doxygen ] ] ;
+ local command =
+ "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ;
+ result = [ SHELL $(command) ] ;
+ if [ MATCH "(Parsing file /)" : $(result) ]
+ {
+ return true ;
+ }
+ }
+}
+
+# Runs Doxygen on the given Doxygen configuration file (the source) to generate
+# the Doxygen files. The output is dumped according to the settings in the
+# Doxygen configuration file, not according to the target! Because of this, we
+# essentially "touch" the target file, in effect making it look like we have
+# really written something useful to it. Anyone that uses this action must deal
+# with this behavior.
+#
+actions doxygen-action
+{
+ $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
+}
+
+
+# Runs the Python doxproc XML processor.
+#
+actions doxproc
+{
+ python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
+}
+
+
+rule translate-path ( path )
+{
+ freeze-config ;
+ if [ os.on-windows ]
+ {
+ if [ os.name ] = CYGWIN
+ {
+ if $(.is-cygwin)
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return $(path:W) ;
+ }
+ }
+ else
+ {
+ if $(.is-cygwin)
+ {
+ match = [ MATCH ^(.):(.*) : $(path) ] ;
+ if $(match)
+ {
+ return /cygdrive/$(match[1])$(match[2]:T) ;
+ }
+ else
+ {
+ return $(path:T) ;
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+}
+
+
+# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
+# and a property list that may contain <doxygen:param> features.
+#
+rule headers-to-doxyfile ( target : sources * : properties * )
+{
+ local text "# Generated by Boost.Build version 2" ;
+
+ local output-dir ;
+
+ # Translate <doxygen:param> into command line flags.
+ for local param in [ feature.get-values <doxygen:param> : $(properties) ]
+ {
+ local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ;
+ if $(namevalue[1]) = OUTPUT_DIRECTORY
+ {
+ output-dir = [ translate-path
+ [ utility.unquote $(namevalue[2]) ] ] ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+ else
+ {
+ text += "$(namevalue[1]) = $(namevalue[2])" ;
+ }
+ }
+
+ if ! $(output-dir)
+ {
+ output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+
+ local headers = ;
+ for local header in $(sources:G=)
+ {
+ header = [ translate-path $(header) ] ;
+ headers += \"$(header)\" ;
+ }
+
+    # Doxygen generates LaTeX by default, so disable it unconditionally, or at
+    # least until someone needs, and hence writes support for, LaTeX output.
+ text += "GENERATE_LATEX = NO" ;
+ text += "INPUT = $(headers:J= )" ;
+ print.output $(target) plain ;
+ print.text $(text) : true ;
+}
+
+
+# Run Doxygen. See doxygen-action for a description of the strange properties of
+# this rule.
+#
+rule run ( target : source : properties * )
+{
+ freeze-config ;
+ if <doxygen.rmdir>on in $(properties)
+ {
+ local output-dir =
+ [ path.make
+ [ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) :
+ $(properties) ] ] ;
+ local html-dir =
+ [ path.make
+ [ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
+ $(properties) ] ] ;
+ if $(output-dir) && $(html-dir) &&
+ [ path.glob $(output-dir) : $(html-dir) ]
+ {
+ HTMLDIR on $(target) =
+ [ path.native [ path.join $(output-dir) $(html-dir) ] ] ;
+ rm-htmldir $(target) ;
+ }
+ }
+ doxygen-action $(target) : $(source) ;
+ NAME on $(target) = $(.doxygen) ;
+ RM on $(target) = [ modules.peek common : RM ] ;
+ *.XML on $(target) =
+ [ path.native
+ [ path.join
+ [ path.make [ on $(target) return $(LOCATE) ] ]
+ $(target:B:S=)
+ *.xml ] ] ;
+}
+
+if [ os.name ] = NT
+{
+ RMDIR = rmdir /s /q ;
+}
+else
+{
+ RMDIR = rm -rf ;
+}
+
+actions quietly rm-htmldir
+{
+ $(RMDIR) $(HTMLDIR)
+}
+
+# The rules below require Boost.Book stylesheets, so we need some code to check
+# that the boostbook module has actually been initialized.
+#
+rule check-boostbook ( )
+{
+ if ! [ modules.peek boostbook : .initialized ]
+ {
+ ECHO "error: the boostbook module is not initialized" ;
+ ECHO "error: you've attempted to use the 'doxygen' toolset, " ;
+ ECHO "error: which requires Boost.Book," ;
+ ECHO "error: but never initialized Boost.Book." ;
+ EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ;
+ }
+}
+
+
+# Collect the set of Doxygen XML files into a single XML source file that can be
+# handled by an XSLT processor. The source is completely ignored (see
+# doxygen-action), because this action picks up the Doxygen XML index file
+# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL
+# program and take a "-o output.xml" argument (grrrr). The target of the
+# collection will be a single Doxygen XML file.
+#
+rule collect ( target : source : properties * )
+{
+ check-boostbook ;
+ local collect-xsl-dir
+ = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
+ local source-path
+ = [ path.make [ on $(source) return $(LOCATE) ] ] ;
+ local collect-path
+ = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
+ local native-path
+ = [ path.native $(collect-path) ] ;
+ local real-source
+ = [ path.native [ path.join $(collect-path) index.xml ] ] ;
+ xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
+ : <xsl:param>doxygen.xml.path=$(native-path) ;
+}
+
+
+# Translate Doxygen XML into BoostBook.
+#
+rule xml-to-boostbook ( target : source : properties * )
+{
+ check-boostbook ;
+ local xsl-dir = [ boostbook.xsl-dir ] ;
+ local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
+ doxygen2boostbook.xsl ] ] ;
+
+ local xslt-properties = $(properties) ;
+ for local prefix in [ feature.get-values <prefix> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
+ }
+ for local title in [ feature.get-values <reftitle> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
+ }
+
+ xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
+}
+
+
+flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ;
+flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
+flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
+
+
+rule xml-dir-to-boostbook ( target : source : properties * )
+{
+ DOXPROC on $(target) = $(.doxproc) ;
+
+ LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
+
+ doxygen.doxproc $(target) : $(source:S=) ;
+}
+
+
+# Generate the HTML redirect to HTML dir index.html file.
+#
+rule html-redirect ( target : source : properties * )
+{
+ local uri = "$(target:B)/index.html" ;
+ print.output $(target) plain ;
+ print.text
+"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
+ \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
+<html xmlns=\"http://www.w3.org/1999/xhtml\">
+<head>
+ <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
+
+ <title></title>
+</head>
+
+<body>
+ Automatic redirection failed, please go to <a href=
+ \"$(uri)\">$(uri)</a>.
+</body>
+</html>
+"
+ : true ;
+}
+
+rule copy-latex-pngs ( target : source : requirements * )
+{
+ local directory = [ path.native
+ [ feature.get-values <doxygen:xml-imagedir> :
+ $(requirements) ] ] ;
+
+ local location = [ on $(target) return $(LOCATE) ] ;
+
+ local pdf-location =
+ [ path.native
+ [ path.join
+ [ path.make $(location) ]
+ [ path.make $(directory) ] ] ] ;
+ local html-location =
+ [ path.native
+ [ path.join
+ .
+ html
+ [ path.make $(directory) ] ] ] ;
+
+ common.MkDir $(pdf-location) ;
+ common.MkDir $(html-location) ;
+
+ DEPENDS $(target) : $(pdf-location) $(html-location) ;
+
+ if [ os.name ] = NT
+ {
+ CP on $(target) = copy /y ;
+ FROM on $(target) = \\*.png ;
+ TOHTML on $(target) = .\\html\\$(directory) ;
+ TOPDF on $(target) = \\$(directory) ;
+ }
+ else
+ {
+ CP on $(target) = cp ;
+ FROM on $(target) = /*.png ;
+ TOHTML on $(target) = ./html/$(directory) ;
+ TOPDF on $(target) = $(target:D)/$(directory) ;
+ }
+}
+
+actions copy-latex-pngs
+{
+ $(CP) $(>:S=)$(FROM) $(TOHTML)
+ $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
+ echo "Stamped" > "$(<)"
+}
+
+# Building LaTeX images for Doxygen XML depends
+# on latex, dvips, and gs being in your PATH.
+# This is true for most Unix installs, but
+# not on Win32, where you will need to install
+# MiKTeX and Ghostscript and add these tools
+# to your path.
+
+actions check-latex
+{
+ latex -version >$(<)
+}
+
+actions check-dvips
+{
+ dvips -version >$(<)
+}
+
+if [ os.name ] = "NT"
+{
+ actions check-gs
+ {
+ gswin32c -version >$(<)
+ }
+}
+else
+{
+ actions check-gs
+ {
+ gs -version >$(<)
+ }
+}
+
+rule check-tools ( )
+{
+ if ! $(.check-tools-targets)
+ {
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config
+ {
+ root-project =
+ [ project.attribute $(root-project) parent-module ] ;
+ }
+
+ .latex.check = [ new file-target latex.check
+ :
+ : [ project.target $(root-project) ]
+ : [ new action : doxygen.check-latex ]
+ :
+ ] ;
+ .dvips.check = [ new file-target dvips.check
+ :
+ : [ project.target $(root-project) ]
+ : [ new action : doxygen.check-dvips ]
+ :
+ ] ;
+ .gs.check = [ new file-target gs.check
+ :
+ : [ project.target $(root-project) ]
+ : [ new action : doxygen.check-gs ]
+ :
+ ] ;
+ .check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ;
+ }
+ return $(.check-tools-targets) ;
+}
+
+project.initialize $(__name__) ;
+project doxygen ;
+
+class doxygen-check-tools-target-class : basic-target
+{
+ import doxygen ;
+ rule construct ( name : sources * : property-set )
+ {
+ return [ property-set.empty ] [ doxygen.check-tools ] ;
+ }
+}
+
+local project = [ project.current ] ;
+
+targets.main-target-alternative
+ [ new doxygen-check-tools-target-class check-tools : $(project)
+ : [ targets.main-target-sources : check-tools : no-renaming ]
+ : [ targets.main-target-requirements : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements : $(project) ]
+ ] ;
+
+# User-level rule to generate BoostBook XML from a set of headers via Doxygen.
+#
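+# A minimal Jamfile sketch (the target, header and extra parameter shown are
+# hypothetical); a target with an ".xml" suffix produces BoostBook XML, one
+# with an ".html" suffix produces a Doxygen HTML directory:
+#
+#     doxygen mylib.xml : mylib.hpp : <doxygen:param>EXTRACT_ALL=YES ;
+#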
+rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
+{
+ freeze-config ;
+ local project = [ project.current ] ;
+
+ if $(target:S) = .html
+ {
+ # Build an HTML directory from the sources.
+ local html-location = [ feature.get-values <location> : $(requirements) ] ;
+ local output-dir ;
+ if [ $(project).get build-dir ]
+ {
+ # Explicitly specified build dir. Add html at the end.
+ output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ;
+ }
+ else
+ {
+            # Trim 'bin' from the implicit build dir, for no other reason than backward
+            # compatibility.
+ output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
+ $(html-location:E=html) ] ;
+ }
+ output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
+ local output-dir-native = [ path.native $(output-dir) ] ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+
+ ## The doxygen configuration file.
+ targets.main-target-alternative
+ [ new typed-target $(target:S=.tag) : $(project) : DOXYFILE
+ : [ targets.main-target-sources $(sources) : $(target:S=.tag) ]
+ : [ targets.main-target-requirements $(requirements)
+ <doxygen:param>GENERATE_HTML=YES
+ <doxygen:param>GENERATE_XML=NO
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target:S=.tag) ;
+
+ ## The html directory to generate by running doxygen.
+ targets.main-target-alternative
+ [ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE
+ : $(target:S=.tag)
+ : [ targets.main-target-requirements $(requirements)
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target:S=.dir) ;
+
+ ## The redirect html file into the generated html.
+ targets.main-target-alternative
+ [ new typed-target $(target) : $(project) : DOXYGEN_HTML
+ : $(target:S=.dir)
+ : [ targets.main-target-requirements $(requirements)
+ <location>$(output-dir)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ }
+ else
+ {
+ # Build a BoostBook XML file from the sources.
+ local location-xml = [ feature.get-values <location> : $(requirements) ] ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+ local target-xml = $(target:B=$(target:B)-xml) ;
+
+ # Check whether we need to build images
+ local images-location =
+ [ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ;
+ if $(images-location)
+ {
+ doxygen $(target).doxygen-xml-images.html : $(sources)
+ : $(requirements)
+ <doxygen.rmdir>on
+ <doxygen:param>QUIET=YES
+ <doxygen:param>WARNINGS=NO
+ <doxygen:param>WARN_IF_UNDOCUMENTED=NO
+ <dependency>/doxygen//check-tools ;
+ $(project).mark-target-as-explicit
+ $(target).doxygen-xml-images.html ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target).doxygen-xml-images
+ : $(project) : DOXYGEN_XML_IMAGES
+ : $(target).doxygen-xml-images.html
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build)
+ : $(project) ]
+ ] ;
+
+ $(project).mark-target-as-explicit
+ $(target).doxygen-xml-images ;
+
+ if ! [ regex.match "^(.*/)$" : $(images-location) ]
+ {
+ images-location = $(images-location)/ ;
+ }
+
+ requirements +=
+ <dependency>$(target).doxygen-xml-images
+ <xsl:param>boost.doxygen.formuladir=$(images-location) ;
+ }
+
+ ## The doxygen configuration file.
+ targets.main-target-alternative
+ [ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE
+ : [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ]
+ : [ targets.main-target-requirements $(requirements)
+ <doxygen:param>GENERATE_HTML=NO
+ <doxygen:param>GENERATE_XML=YES
+ <doxygen:param>XML_OUTPUT=$(target-xml)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target-xml:S=.tag) ;
+
+ ## The Doxygen XML directory of the processed source files.
+ targets.main-target-alternative
+ [ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE
+ : $(target-xml:S=.tag)
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target-xml:S=.dir) ;
+
+    ## The resulting BoostBook file is generated by the processor tool. The
+    ## tool can be either xsltproc plus the accompanying XSL scripts, or the
+    ## python doxproc.py script.
+ targets.main-target-alternative
+ [ new typed-target $(target-xml) : $(project) : BOOSTBOOK
+ : $(target-xml:S=.dir)
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target-xml) ;
+
+ targets.main-target-alternative
+ [ new install-target-class $(target:S=.xml) : $(project)
+ : $(target-xml)
+ : [ targets.main-target-requirements $(requirements)
+ <location>$(location-xml:E=.)
+ <name>$(target:S=.xml)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target:S=.xml) ;
+
+ targets.main-target-alternative
+ [ new alias-target-class $(target) : $(project)
+ :
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements)
+ <dependency>$(target:S=.xml)
+ : $(project) ]
+ ] ;
+ }
+}
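+
+# Illustrative usage sketch (not part of the upstream file; file and target
+# names below are hypothetical). The target suffix picks the output kind:
+#
+#   import doxygen ;
+#   # An HTML documentation tree built from the listed headers:
+#   doxygen mylib.html : mylib.hpp : <doxygen:param>EXTRACT_ALL=YES ;
+#   # A BoostBook XML file (any non-.html suffix takes the XML path above):
+#   doxygen mylib.xml : mylib.hpp ;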
diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile
new file mode 100644
index 000000000..9b969df9c
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile
@@ -0,0 +1,3 @@
+INPUT = windows-paths-check.hpp
+GENERATE_HTML = NO
+GENERATE_LATEX = NO
diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp
diff --git a/jam-files/boost-build/tools/fop.jam b/jam-files/boost-build/tools/fop.jam
new file mode 100644
index 000000000..c24b8725f
--- /dev/null
+++ b/jam-files/boost-build/tools/fop.jam
@@ -0,0 +1,69 @@
+# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed
+# under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+# This module defines rules to handle generation of PDF and
+# PostScript files from XSL Formatting Objects via Apache FOP
+
+import generators ;
+import common ;
+import boostbook ;
+
+generators.register-standard fop.render.pdf : FO : PDF ;
+generators.register-standard fop.render.ps : FO : PS ;
+
+# Initializes the fop toolset.
+#
+rule init ( fop-command ? : java-home ? : java ? )
+{
+ local has-command = $(.has-command) ;
+
+ if $(fop-command)
+ {
+ .has-command = true ;
+ }
+
+ if $(fop-command) || ! $(has-command)
+ {
+ fop-command = [ common.get-invocation-command fop : fop : $(fop-command)
+ : [ modules.peek : FOP_DIR ] ] ;
+ }
+
+ if $(fop-command)
+ {
+ .FOP_COMMAND = $(fop-command) ;
+ }
+
+ if $(java-home) || $(java)
+ {
+ .FOP_SETUP = ;
+
+
+ # JAVA_HOME is the location that java was installed to.
+
+ if $(java-home)
+ {
+ .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
+ }
+
+        # JAVACMD is the location of the java executable, useful for a
+ # non-standard java installation, where the executable isn't at
+ # $JAVA_HOME/bin/java.
+
+ if $(java)
+ {
+ .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ;
+ }
+ }
+}
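+
+# Illustrative configuration sketch (assumed, not part of the upstream file;
+# both paths are hypothetical). In user-config.jam the toolset would typically
+# be initialized along the lines of
+#
+#   using fop : /usr/local/bin/fop : /usr/lib/jvm/default-java ;
+#
+# With no arguments, 'fop' is looked up via FOP_DIR/PATH and JAVA_HOME/JAVACMD
+# are left untouched, per the init rule above.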
+
+actions render.pdf
+{
+ $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<)
+}
+
+actions render.ps
+{
+ $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<)
+}
diff --git a/jam-files/boost-build/tools/fortran.jam b/jam-files/boost-build/tools/fortran.jam
new file mode 100644
index 000000000..37665825e
--- /dev/null
+++ b/jam-files/boost-build/tools/fortran.jam
@@ -0,0 +1,55 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# This file contains common settings for all fortran tools
+#
+
+import "class" : new ;
+import feature : feature ;
+
+import type ;
+import generators ;
+import common ;
+
+type.register FORTRAN : f F for f77 ;
+type.register FORTRAN90 : f90 F90 ;
+
+feature fortran : : free ;
+feature fortran90 : : free ;
+
+class fortran-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
+ }
+}
+
+rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
+{
+ local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
+ generators.register $(g) ;
+}
+
+class fortran90-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
+ }
+}
+
+rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
+{
+ local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
+ generators.register $(g) ;
+}
+
+# FIXME: this is ugly, should find a better way (we'd want client code to
+# register all generators as "generator.some-rule", not with "some-module.some-rule".)
+IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ;
+IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ;
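+
+# Illustrative sketch (assumed, not part of the upstream file): a Fortran
+# toolset module registers its compiler through the rules above, e.g.
+#
+#   generators.register-fortran-compiler gfortran.compile : FORTRAN FORTRAN90 : OBJ : <toolset>gfortran ;
+#
+# which mirrors the gcc.compile.fortran registration done in gcc.jam later in
+# this patch.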
diff --git a/jam-files/boost-build/tools/gcc.jam b/jam-files/boost-build/tools/gcc.jam
new file mode 100644
index 000000000..f7b0da542
--- /dev/null
+++ b/jam-files/boost-build/tools/gcc.jam
@@ -0,0 +1,1185 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2002-2006 Rene Rivera.
+# Copyright 2002-2003 Vladimir Prus.
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov.
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import pch ;
+import property ;
+import property-set ;
+import toolset ;
+import type ;
+import rc ;
+import regex ;
+import set ;
+import unix ;
+import fortran ;
+
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+
+feature.extend toolset : gcc ;
+# feature.subfeature toolset gcc : flavor : : optional ;
+
+toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
+toolset.inherit-flags gcc : unix ;
+toolset.inherit-rules gcc : unix ;
+
+generators.override gcc.prebuilt : builtin.prebuilt ;
+generators.override gcc.searched-lib-generator : searched-lib-generator ;
+
+# Make gcc toolset object files use the "o" suffix on all platforms.
+type.set-generated-target-suffix OBJ : <toolset>gcc : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ;
+
+# Initializes the gcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or
+# sun and the default value will be selected based on the current OS.
+# Example:
+# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+# The compiler command to use is detected in a three-step manner:
+# 1) If an explicit command is specified by the user, it will be used and must be available.
+# 2) If only a certain version is specified, it is enforced:
+# - either a command 'g++-VERSION' must be available
+# - or the default command 'g++' must be available and match the exact version.
+# 3) Without user-provided restrictions, the default 'g++' is used.
+rule init ( version ? : command * : options * )
+{
+ #1): use user-provided command
+ local tool-command = ;
+ if $(command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ;
+ if ! $(tool-command)
+ {
+ errors.error "toolset gcc initialization:" :
+ "provided command '$(command)' not found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ #2): enforce user-provided version
+ else if $(version)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ;
+
+ #2.1) fallback: check whether "g++" reports the requested version
+ if ! $(tool-command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if $(tool-command)
+ {
+ local tool-command-string = $(tool-command:J=" ") ;
+ local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ;
+ if $(tool-version) != $(version)
+ {
+                    # Permit a match between the two-digit version specified by
+                    # the user (e.g. 4.4) and the 3-digit version reported by gcc.
+                    # Since only two digits are present in the binary name anyway,
+                    # insisting that the user specify a 3-digit version when
+                    # configuring Boost.Build, while it is not required on the
+                    # command line, would be strange.
+ local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ;
+ if $(stripped) != $(version)
+ {
+ errors.error "toolset gcc initialization:" :
+ "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ tool-command = ;
+ }
+ # Use full 3-digit version to be compatible with the 'using gcc ;' case
+ version = $(tool-version) ;
+ }
+ }
+ else
+ {
+ errors.error "toolset gcc initialization:" :
+ "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ }
+ #3) default: no command and no version specified, try using default command "g++"
+ else
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if ! $(tool-command)
+ {
+ errors.error "toolset gcc initialization:" :
+ "no command provided, default command 'g++' not found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+
+
+ # Information about the gcc command...
+ # The command.
+ local command = $(tool-command) ;
+ # The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+ # The bin directory where to find the command to execute.
+ local bin ;
+ # The flavor of compiler.
+ local flavor = [ feature.get-values <flavor> : $(options) ] ;
+ # Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+ }
+ # The 'command' variable can have multiple elements. When calling
+ # the SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ # Autodetect the version and flavor if not given.
+ if $(command)
+ {
+ local machine = [ MATCH "^([^ ]+)"
+ : [ SHELL "$(command-string) -dumpmachine" ] ] ;
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ switch $(machine:L)
+ {
+ case *mingw* : flavor ?= mingw ;
+ }
+ }
+
+ local condition ;
+ if $(flavor)
+ {
+ condition = [ common.check-init-parameters gcc
+ : version $(version)
+ : flavor $(flavor)
+ ] ;
+ }
+ else
+ {
+ condition = [ common.check-init-parameters gcc
+ : version $(version)
+ ] ;
+ condition = $(condition) ; #/<toolset-gcc:flavor> ;
+ }
+
+ common.handle-options gcc : $(condition) : $(command) : $(options) ;
+
+ local linker = [ feature.get-values <linker-type> : $(options) ] ;
+ # The logic below should actually be keyed on <target-os>
+ if ! $(linker)
+ {
+ if [ os.name ] = OSF
+ {
+ linker = osf ;
+ }
+ else if [ os.name ] = HPUX
+ {
+ linker = hpux ;
+ }
+ else if [ os.name ] = AIX
+ {
+ linker = aix ;
+ }
+ else if [ os.name ] = SOLARIS
+ {
+ linker = sun ;
+ }
+ else
+ {
+ linker = gnu ;
+ }
+ }
+ init-link-flags gcc $(linker) $(condition) ;
+
+
+ # If gcc is installed in non-standard location, we'd need to add
+ # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+ # rules).
+ if $(command)
+ {
+        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+        # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+        # right ones. Note that we don't provide a clean way to build a 32-bit
+        # binary with a 64-bit compiler, but the user can always pass -m32 manually.
+ local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ;
+ }
+ toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+
+ # If it's not a system gcc install we should adjust the various programs as
+ # needed to prefer using the install specific versions. This is essential
+ # for correct use of MinGW and for cross-compiling.
+
+ local nl = "
+" ;
+
+ # - The archive builder.
+ local archiver = [ common.get-invocation-command gcc
+ : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ar" ] ] ]
+ : [ feature.get-values <archiver> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ;
+ }
+
+ # - Ranlib
+ local ranlib = [ common.get-invocation-command gcc
+ : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ]
+ : [ feature.get-values <ranlib> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ;
+ }
+
+
+ # - The resource compiler.
+ local rc =
+ [ common.get-invocation-command-nodefault gcc
+ : windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
+ local rc-type =
+ [ feature.get-values <rc-type> : $(options) ] ;
+ rc-type ?= windres ;
+ if ! $(rc)
+ {
+        # If we can't find an RC compiler we fall back to a null RC compiler that
+ # creates empty object files. This allows the same Jamfiles to work
+ # across the board. The null RC uses the assembler to create the empty
+ # objects, so configure that.
+ rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ;
+ rc-type = null ;
+ }
+ rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
+}
+
+if [ os.name ] = NT
+{
+ # This causes single-line command invocation to not go through .bat files,
+ # thus avoiding command-line length limitations.
+ JAMSHELL = % ;
+}
+
+generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
+generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;
+
+# pch support
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended
+# to the full name of the header.
+
+type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
+
+# GCC-specific pch generator.
+class gcc-pch-generator : pch-generator
+{
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ # Find the header in sources. Ignore any CPP sources.
+ local header ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ header = $(s) ;
+ }
+ }
+
+ # Error handling: Base header file name should be the same as the base
+ # precompiled header name.
+ local header-name = [ $(header).name ] ;
+ local header-basename = $(header-name:B) ;
+ if $(header-basename) != $(name)
+ {
+ local location = [ $(project).project-module ] ;
+ errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
+ }
+
+ local pch-file = [ generator.run $(project) $(name) : $(property-set)
+ : $(header) ] ;
+
+ # return result of base class and pch-file property as usage-requirements
+ return
+ [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ]
+ $(pch-file)
+ ;
+ }
+
+    # Calls the base version, specifying the source's name as the name of the
+    # created target. As a result, the PCH will be named whatever.hpp.gch, and
+    # not whatever.gch.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ name = [ $(sources[1]).name ] ;
+ return [ generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+# Note: the 'H' source type will catch both '.h' and '.hpp' headers. The
+# latter have the HPP type, but HPP is derived from H. The type of compilation
+# is determined entirely by the destination type.
+generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
+generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
+
+# Override default do-nothing generators.
+generators.override gcc.compile.c.pch : pch.default-c-pch-generator ;
+generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
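+
+# Illustrative usage sketch (assumed, not part of the upstream file; target and
+# file names are hypothetical). With the generators registered above, a Jamfile
+# can request a precompiled header through the pch module's main target rule:
+#
+#   cpp-pch mypch : mypch.hpp ;
+#   exe app : app.cpp mypch ;
+#
+# The PCH is then emitted as mypch.hpp.gch, as described for
+# gcc-pch-generator.generated-targets above.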
+
+# Declare flags and action for compilation.
+toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags gcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags gcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.compile OPTIONS <profiling>on : -pg ;
+toolset.flags gcc.compile OPTIONS <rtti>off : -fno-rtti ;
+
+rule setup-fpic ( targets * : sources * : properties * )
+{
+ local link = [ feature.get-values link : $(properties) ] ;
+ if $(link) = shared
+ {
+ local target = [ feature.get-values target-os : $(properties) ] ;
+
+ # This logic will add -fPIC for all compilations:
+ #
+ # lib a : a.cpp b ;
+ # obj b : b.cpp ;
+ # exe c : c.cpp a d ;
+ # obj d : d.cpp ;
+ #
+        # This is all fine, except that 'd' will be compiled with -fPIC even though
+        # it is not needed, as 'd' is used only in an exe. However, it is hard to
+        # detect where a target is going to be used. Alternatively, we could set -fPIC
+        # only when the main target type is LIB, but then 'b' would be compiled without
+        # -fPIC, which would lead to link errors on x86-64. So, compile everything
+        # with -fPIC.
+ #
+ # Yet another alternative would be to create a propagated <sharedable>
+ # feature and set it when building shared libraries, but that would be hard
+ # to implement and would increase the target path length even more.
+
+        # On Windows, -fPIC is the default; specifying it explicitly leads to
+        # a warning.
+ if $(target) != cygwin && $(target) != windows
+ {
+ OPTIONS on $(targets) += -fPIC ;
+ }
+ }
+}
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local model = [ feature.get-values address-model : $(properties) ] ;
+ if $(model)
+ {
+ local option ;
+ local os = [ feature.get-values target-os : $(properties) ] ;
+ if $(os) = aix
+ {
+ if $(model) = 32
+ {
+ option = -maix32 ;
+ }
+ else
+ {
+ option = -maix64 ;
+ }
+ }
+ else if $(os) = hpux
+ {
+ if $(model) = 32
+ {
+ option = -milp32 ;
+ }
+ else
+ {
+ option = -mlp64 ;
+ }
+ }
+ else
+ {
+ if $(model) = 32
+ {
+ option = -m32 ;
+ }
+ else if $(model) = 64
+ {
+ option = -m64 ;
+ }
+ # For darwin, the model can be 32_64. darwin.jam will handle that
+ # on its own.
+ }
+ OPTIONS on $(targets) += $(option) ;
+ }
+}
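+
+# Illustrative effect (assumed, not part of the upstream file): a build run as
+#
+#   bjam toolset=gcc address-model=64
+#
+# makes the rule above add -m64 on generic targets, -maix64 on AIX and -mlp64
+# on HP-UX; darwin's 32_64 model is left to darwin.jam, per the branches above.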
+
+
+# FIXME: this should not use os.name.
+if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX && [ os.name ] != AIX
+{
+ # OSF does have an option called -soname but it does not seem to work as
+ # expected, therefore it has been disabled.
+ HAVE_SONAME = "" ;
+ SONAME_OPTION = -h ;
+}
+
+# HPUX, for some reason, seems to use '+h', not '-h'.
+if [ os.name ] = HPUX
+{
+ HAVE_SONAME = "" ;
+ SONAME_OPTION = +h ;
+}
+
+toolset.flags gcc.compile USER_OPTIONS <cflags> ;
+toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
+toolset.flags gcc.compile DEFINES <define> ;
+toolset.flags gcc.compile INCLUDES <include> ;
+toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ;
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++.pch
+{
+ "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c++.preprocess ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c.preprocess ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+    # If we use the name g++ then the default file suffix -> language mapping does
+    # not work, so we have to pass the -x option. Maybe we can work around this by
+    # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
+ local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(<) = 128 ;
+ }
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+    # If we use the name g++ then the default file suffix -> language mapping does
+    # not work, so we have to pass the -x option. Maybe we can work around this by
+    # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.fortran ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)"
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
+}
+
+actions compile.c.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.asm ( targets * : sources * : properties * )
+{
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ LANG on $(<) = "-x assembler-with-cpp" ;
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# The class which checks that we don't try to use the <runtime-link>static
+# property while creating or using a shared library, since that is not supported
+# by gcc/libc.
+class gcc-linking-generator : unix-linking-generator
+{
+ rule run ( project name ? : property-set : sources + )
+ {
+ # TODO: Replace this with the use of a target-os property.
+ local no-static-link = ;
+ if [ modules.peek : UNIX ]
+ {
+ switch [ modules.peek : JAMUNAME ]
+ {
+ case * : no-static-link = true ;
+ }
+ }
+
+ local properties = [ $(property-set).raw ] ;
+ local reason ;
+ if $(no-static-link) && <runtime-link>static in $(properties)
+ {
+ if <link>shared in $(properties)
+ {
+ reason =
+                    "On gcc, DLL can't be built with '<runtime-link>static'." ;
+ }
+ else if [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ reason =
+                            "On gcc, using DLLs together with the"
+                            "<runtime-link>static option is not possible " ;
+ }
+ }
+ }
+ }
+ if $(reason)
+ {
+ ECHO warning:
+ $(reason) ;
+ ECHO warning:
+ "It is suggested to use '<runtime-link>static' together"
+ "with '<link>static'." ;
+ return ;
+ }
+ else
+ {
+ local generated-targets = [ unix-linking-generator.run $(project)
+ $(name) : $(property-set) : $(sources) ] ;
+ return $(generated-targets) ;
+ }
+ }
+}
+
+# The set of permissible input types is different on mingw.
+# So, define two sets of generators, with mingw generators
+# selected when target-os=windows.
+
+local g ;
+g = [ new gcc-linking-generator gcc.mingw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.mingw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.register
+ [ new gcc-linking-generator gcc.link
+ : LIB OBJ
+ : EXE
+ : <toolset>gcc ] ;
+generators.register
+ [ new gcc-linking-generator gcc.link.dll
+ : LIB OBJ
+ : SHARED_LIB
+ : <toolset>gcc ] ;
+
+generators.override gcc.mingw.link : gcc.link ;
+generators.override gcc.mingw.link.dll : gcc.link.dll ;
+
+# Cygwin is similar to msvc and mingw in that it uses import libraries.
+# While in simple cases it can link directly to a shared library, this is
+# believed to be slower and is not always possible. Define cygwin-specific
+# generators here.
+
+g = [ new gcc-linking-generator gcc.cygwin.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.cygwin.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.override gcc.cygwin.link : gcc.link ;
+generators.override gcc.cygwin.link.dll : gcc.link.dll ;
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.link OPTIONS <profiling>on : -pg ;
+toolset.flags gcc.link USER_OPTIONS <linkflags> ;
+toolset.flags gcc.link LINKPATH <library-path> ;
+toolset.flags gcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags gcc.link LIBRARIES <library-file> ;
+
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
+if [ os.name ] != HPUX
+{
+ toolset.flags gcc.link OPTIONS <runtime-link>static : -static ;
+}
+
+# Now, the vendor specific flags.
+# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun.
+rule init-link-flags ( toolset linker condition )
+{
+ switch $(linker)
+ {
+ case aix :
+ {
+ #
+ # On AIX we *have* to use the native linker.
+ #
+ # Using -brtl, the AIX linker will look for libraries with both the .a
+ # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
+ # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
+ # file that may contain shared objects and is different from static libs
+ # as on Linux.
+ #
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent, this is as close as it gets.
+ #
+        # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+
+ toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath
+ : unchecked ;
+ }
+
+ case darwin :
+ {
+ # On Darwin, the -s option to ld does not work unless we pass -static,
+            # and passing -static unconditionally is a bad idea. So, don't pass -s
+            # at all; darwin.jam will use a separate 'strip' invocation.
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
+ toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
+ }
+
+ case gnu :
+ {
+            # Strip the binary when no debugging is needed. We use the --strip-all flag
+ # as opposed to -s since icc (intel's compiler) is generally
+ # option-compatible with and inherits from the gcc toolset, but does not
+ # support -s.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
+ toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
+ toolset.flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
+ toolset.flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
+
+            # gnu ld has the ability to change the search behaviour for libraries
+            # referenced by the -l switch. These modifiers are -Bstatic and -Bdynamic
+            # and change the search for -l switches that follow them. The following list
+ # shows the tried variants.
+ # The search stops at the first variant that has a match.
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw,cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw,cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+ # On *nix mixing shared libs with static runtime is not a good idea.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared
+ : -Wl,-Bstatic : unchecked ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared
+ : -Wl,-Bdynamic : unchecked ;
+
+ # On windows allow mixing of static and dynamic libs with static
+ # runtime.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows
+ : -Wl,-Bstatic : unchecked ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows
+ : -Wl,-Bdynamic : unchecked ;
+ toolset.flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows
+ : -Wl,-Bstatic : unchecked ;
+ }
+
+ case hpux :
+ {
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
+ : -Wl,-s : unchecked ;
+ toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
+ : -fPIC : unchecked ;
+ }
+
+ case osf :
+ {
+ # No --strip-all, just -s.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
+ : -Wl,-s : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
+ : unchecked ;
+            # This does not support -R.
+ toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath
+ : unchecked ;
+ # -rpath-link is not supported at all.
+ }
+
+ case sun :
+ {
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
+ : -Wl,-s : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
+ : unchecked ;
+            # The Solaris linker does not have a separate -rpath-link, but allows
+            # -L to be used for the same purpose.
+ toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path>
+ : unchecked ;
+
+ # This permits shared libraries with non-PIC code on Solaris.
+            # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
+ # following is not needed. Whether -fPIC should be hardcoded, is a
+ # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
+ : -mimpure-text : unchecked ;
+ }
+
+ case * :
+ {
+ errors.user-error
+ "$(toolset) initialization: invalid linker '$(linker)'" :
+ "The value '$(linker)' specified for <linker> is not recognized." :
+ "Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'" ;
+ }
+ }
+}
+
+# Enclose the RPATH variable on 'targets' in (double) quotes,
+# unless it's already enclosed in single quotes.
+# This special casing is done because it's common to pass
+# '$ORIGIN' to the linker -- and it has to have single quotes
+# to prevent expansion by the shell -- and if we add double
+# quotes then the protective effect of the single quotes disappears.
+rule quote-rpath ( targets * )
+{
+ local r = [ on $(targets[1]) return $(RPATH) ] ;
+ if ! [ MATCH "('.*')" : $(r) ]
+ {
+ r = "\"$(r)\"" ;
+ }
+ RPATH on $(targets) = $(r) ;
+}
+
+# Declare actions for linking.
+rule link ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+    # parallel is just slower. For now, serialize only gcc links; it might be a
+    # good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+
+}
+
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
+# always available.
+.AR = ar ;
+.RANLIB = ranlib ;
+
+toolset.flags gcc.archive AROPTIONS <archiveflags> ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here is the rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+# Declare action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove the archive, we don't care about replacement, but there's no option "add
+# without replacement".
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+ local threading = [ feature.get-values threading : $(properties) ] ;
+ if $(threading) = multi
+ {
+ local target = [ feature.get-values target-os : $(properties) ] ;
+ local option ;
+ local libs ;
+
+ switch $(target)
+ {
+ case windows :
+ {
+ option = -mthreads ;
+ }
+ case cygwin :
+ {
+ option = -mthreads ;
+ }
+ case solaris :
+ {
+ option = -pthreads ;
+ libs = rt ;
+ }
+ case beos :
+ {
+ # BeOS has no threading options, so do not set anything here.
+ }
+ case *bsd :
+ {
+ option = -pthread ;
+ # There is no -lrt on BSD.
+ }
+ case sgi :
+ {
+ # gcc on IRIX does not support multi-threading so do not set anything
+ # here.
+ }
+ case darwin :
+ {
+ # Darwin has no threading options so do not set anything here.
+ }
+ case * :
+ {
+ option = -pthread ;
+ libs = rt ;
+ }
+ }
+
+ if $(option)
+ {
+ OPTIONS on $(targets) += $(option) ;
+ }
+ if $(libs)
+ {
+ FINDLIBS-SA on $(targets) += $(libs) ;
+ }
+ }
+}
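+
+# Illustrative effect (assumed, not part of the upstream file): a build run as
+#
+#   bjam toolset=gcc threading=multi
+#
+# makes the rule above add -pthread and link librt on generic *nix targets,
+# -mthreads on windows/cygwin, and -pthreads plus librt on solaris.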
+
+local rule cpu-flags ( toolset variable : architecture : instruction-set + : values + : default ? )
+{
+ if $(default)
+ {
+ toolset.flags $(toolset) $(variable)
+ <architecture>$(architecture)/<instruction-set>
+ : $(values) ;
+ }
+ toolset.flags $(toolset) $(variable)
+ <architecture>/<instruction-set>$(instruction-set)
+ <architecture>$(architecture)/<instruction-set>$(instruction-set)
+ : $(values) ;
+}
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+# The 'native' option appeared in gcc 4.2 so we cannot safely use it
+# as default. Use conservative i386 instead.
+cpu-flags gcc OPTIONS : x86 : native : -march=native ;
+cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 : default ;
+cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
+cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
+cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
+cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ;
+cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ;
+cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ;
+cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ;
+cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ;
+cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ;
+cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ;
+cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ;
+cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ;
+cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ;
+cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ;
+cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
+cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
+cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
+cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ;
+cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ;
+cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ;
+cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ;
+cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ;
+##
+cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ;
+cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ;
+cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ;
+cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ;
+cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
+cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
+cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
+cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ;
+# Sparc
+cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ;
+cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ;
+cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ;
+cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ;
+cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ;
+cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ;
+cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ;
+cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ;
+cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ;
+cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ;
+cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ;
+# RS/6000 & PowerPC
+cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ;
+cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ;
+cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ;
+cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ;
+cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ;
+cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ;
+cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ;
+cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ;
+cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ;
+cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ;
+cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ;
+cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ;
+cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ;
+cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ;
+cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ;
+cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ;
+cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ;
+cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ;
+cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ;
+cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ;
+cpu-flags gcc OPTIONS : power : power : -mcpu=power ;
+cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ;
+cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ;
+cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ;
+cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ;
+cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ;
+cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ;
+cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ;
+cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ;
+cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ;
+cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ;
+cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ;
+# AIX variant of RS/6000 & PowerPC
+toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X 64" ;
diff --git a/jam-files/boost-build/tools/generate.jam b/jam-files/boost-build/tools/generate.jam
new file mode 100644
index 000000000..6732fa355
--- /dev/null
+++ b/jam-files/boost-build/tools/generate.jam
@@ -0,0 +1,108 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Declares the main target rule 'generate', used to produce targets by calling a
+# user-provided rule that takes and produces virtual targets.
+
+import "class" : new ;
+import errors ;
+import feature ;
+import project ;
+import property ;
+import property-set ;
+import targets ;
+import regex ;
+
+
+feature.feature generating-rule : : free ;
+
+
+class generated-target-class : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ if ! [ $(self.requirements).get <generating-rule> ]
+ {
+ errors.user-error "The generate rule requires the <generating-rule>"
+ "property to be set" ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ local result ;
+ local gr = [ $(property-set).get <generating-rule> ] ;
+
+ # FIXME: this is a copy-paste from virtual-target.jam. We should add a
+ # utility rule to call a rule like this.
+ local rule-name = [ MATCH ^@(.*) : $(gr) ] ;
+ if $(rule-name)
+ {
+ if $(gr[2])
+ {
+ local target-name = [ full-name ] ;
+ errors.user-error "Multiple <generating-rule> properties"
+ "encountered for target $(target-name)." ;
+ }
+
+ result = [ indirect.call $(rule-name) $(self.project) $(name)
+ : $(property-set) : $(sources) ] ;
+
+ if ! $(result)
+ {
+ ECHO "warning: Unable to construct" [ full-name ] ;
+ }
+ }
+
+ local ur ;
+ local targets ;
+
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ ur = $(result[1]) ;
+ targets = $(result[2-]) ;
+ }
+ else
+ {
+ ur = [ property-set.empty ] ;
+ targets = $(result) ;
+ }
+ }
+ # FIXME: the following loop should be doable using sequence.transform or
+ # some similar utility rule.
+ local rt ;
+ for local t in $(targets)
+ {
+ rt += [ virtual-target.register $(t) ] ;
+ }
+ return $(ur) $(rt) ;
+ }
+}
+
+
+rule generate ( name : sources * : requirements * : default-build *
+ : usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new generated-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
+}
+
+IMPORT $(__name__) : generate : : generate ;
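+
+# Illustrative usage sketch (assumed, not part of the upstream file; rule and
+# target names are hypothetical). A Jamfile passes the producing rule through
+# the <generating-rule> property using the indirect '@rule-name' syntax:
+#
+#   import generate ;
+#   rule my-generator ( project name : property-set : sources * )
+#   {
+#       # Return virtual targets, optionally preceded by a usage-requirements
+#       # property-set, as expected by 'construct' above.
+#   }
+#   generate out : in.txt : <generating-rule>@my-generator ;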
diff --git a/jam-files/boost-build/tools/gettext.jam b/jam-files/boost-build/tools/gettext.jam
new file mode 100644
index 000000000..99a43ffe9
--- /dev/null
+++ b/jam-files/boost-build/tools/gettext.jam
@@ -0,0 +1,230 @@
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module supports the GNU gettext internationalization utilities.
+#
+# It provides two main target rules: 'gettext.catalog', used for
+# creating machine-readable catalogs from translation files, and
+# 'gettext.update', used for updating translation files from modified
+# sources.
+#
+# To add i18n support to your application you should follow these
+# steps.
+#
+# - Decide on a file name which will contain translations and
+# what main target name will be used to update it. For example::
+#
+# gettext.update update-russian : russian.po a.cpp my_app ;
+#
+# - Create the initial translation file by running::
+#
+# bjam update-russian
+#
+# - Edit russian.po. For example, you might change fields like LastTranslator.
+#
+# - Create a main target for final message catalog::
+#
+# gettext.catalog russian : russian.po ;
+#
+# The machine-readable catalog will be updated whenever you update
+# "russian.po". The "russian.po" file will be updated only on explicit
+# request. When you're ready to update translations, you should
+#
+# - Run::
+#
+# bjam update-russian
+#
+# - Edit "russian.po" in appropriate editor.
+#
+# The next bjam run will convert "russian.po" into machine-readable form.
+#
+# By default, translations are marked by 'i18n' call. The 'gettext.keyword'
+# feature can be used to alter this.
+
+
+import targets ;
+import property-set ;
+import virtual-target ;
+import "class" : new ;
+import project ;
+import type ;
+import generators ;
+import errors ;
+import feature : feature ;
+import toolset : flags ;
+import regex ;
+
+.path = "" ;
+
+# Initializes the gettext module.
+rule init ( path ? # Path where all tools are located. If not specified,
+ # they should be in PATH.
+ )
+{
+ if $(.initialized) && $(.path) != $(path)
+ {
+ errors.error "Attempt to reconfigure with different path" ;
+ }
+ .initialized = true ;
+ if $(path)
+ {
+ .path = $(path)/ ;
+ }
+}
+
+# Creates a main target 'name', which, when updated, will cause
+# file 'existing-translation' to be updated with translations
+# extracted from 'sources'. It's possible to specify main targets
+# in sources --- in which case all targets from the dependency graph
+# of those main targets will be scanned, provided they are of the
+# appropriate type. The 'gettext.types' feature can be used to
+# control the types.
+#
+# The target will be updated only if explicitly requested on the
+# command line.
+rule update ( name : existing-translation sources + : requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : gettext.UPDATE :
+ $(existing-translation) $(sources)
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(name) ;
+}
+
+
+# The human editable source, containing translation.
+type.register gettext.PO : po ;
+# The machine readable message catalog.
+type.register gettext.catalog : mo ;
+# Intermediate type produced by extracting translations from
+# sources.
+type.register gettext.POT : pot ;
+# Pseudo type used to invoke update-translations generator
+type.register gettext.UPDATE ;
+
+# Identifies the keyword that should be used when scanning sources.
+# Default: i18n
+feature gettext.keyword : : free ;
+# Contains a space-separated list of source types which should be scanned.
+# Default: "C CPP"
+feature gettext.types : : free ;
+
+generators.register-standard gettext.compile : gettext.PO : gettext.catalog ;
+
+class update-translations-generator : generator
+{
+ import regex : split ;
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ # The rule should be called with at least two sources. The first source
+ # is the translation (.po) file to update. The remaining sources are targets
+    # which should be scanned for new messages. All source files for those targets
+    # will be found and passed to the 'xgettext' utility, which extracts the
+    # messages for localization. Those messages will be merged into the .po file.
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local types = [ $(property-set).get <gettext.types> ] ;
+ types ?= "C CPP" ;
+ types = [ regex.split $(types) " " ] ;
+
+ local keywords = [ $(property-set).get <gettext.keyword> ] ;
+ property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ;
+
+        # First determine the list of sources that must be scanned for
+ # messages.
+ local all-sources ;
+ # CONSIDER: I'm not sure if the logic should be the same as for 'stage':
+ # i.e. following dependency properties as well.
+ for local s in $(sources[2-])
+ {
+ all-sources += [ virtual-target.traverse $(s) : : include-sources ] ;
+ }
+ local right-sources ;
+ for local s in $(all-sources)
+ {
+ if [ $(s).type ] in $(types)
+ {
+ right-sources += $(s) ;
+ }
+ }
+
+ local .constructed ;
+ if $(right-sources)
+ {
+ # Create the POT file, which will contain list of messages extracted
+ # from the sources.
+ local extract =
+ [ new action $(right-sources) : gettext.extract : $(property-set) ] ;
+ local new-messages = [ new file-target $(name) : gettext.POT
+ : $(project) : $(extract) ] ;
+
+ # Create a notfile target which will update the existing translation file
+ # with new messages.
+ local a = [ new action $(sources[1]) $(new-messages)
+ : gettext.update-po-dispatch ] ;
+ local r = [ new notfile-target $(name) : $(project) : $(a) ] ;
+ .constructed = [ virtual-target.register $(r) ] ;
+ }
+ else
+ {
+ errors.error "No source could be scanned by gettext tools" ;
+ }
+ return $(.constructed) ;
+ }
+}
+generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ;
+
+flags gettext.extract KEYWORD <gettext.keyword> ;
+actions extract
+{
+ $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>)
+}
+
+# Does the real updating of the po file. The tricky part is that
+# we're actually updating one of the sources:
+# $(<) is the NOTFILE target we're updating
+# $(>[1]) is the PO file to be really updated.
+# $(>[2]) is the PO file created from sources.
+#
+# When the file to be updated does not exist (during the
+# first run), we need to copy the file created from sources.
+# In all other cases, we need to update the file.
+rule update-po-dispatch
+{
+ NOCARE $(>[1]) ;
+ gettext.create-po $(<) : $(>) ;
+ gettext.update-po $(<) : $(>) ;
+ _ on $(<) = " " ;
+ ok on $(<) = "" ;
+ EXISTING_PO on $(<) = $(>[1]) ;
+}
+
+# Due to the fancy interaction of 'existing' and 'updated', this rule can be
+# called with one source, in which case we copy the lonely source into
+# EXISTING_PO, or with two sources, in which case the action body expands to
+# nothing. I'd really like to have a "missing" action modifier.
+actions quietly existing updated create-po bind EXISTING_PO
+{
+ cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
+}
+
+actions updated update-po bind EXISTING_PO
+{
+ $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])"
+}
+
+actions gettext.compile
+{
+ $(.path)msgfmt -o $(<) $(>)
+}
+
+IMPORT $(__name__) : update : : gettext.update ;
diff --git a/jam-files/boost-build/tools/gfortran.jam b/jam-files/boost-build/tools/gfortran.jam
new file mode 100644
index 000000000..0aa69b85c
--- /dev/null
+++ b/jam-files/boost-build/tools/gfortran.jam
@@ -0,0 +1,39 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags gfortran OPTIONS <fflags> ;
+
+flags gfortran OPTIONS <optimization>off : -O0 ;
+flags gfortran OPTIONS <optimization>speed : -O3 ;
+flags gfortran OPTIONS <optimization>space : -Os ;
+
+flags gfortran OPTIONS <debug-symbols>on : -g ;
+flags gfortran OPTIONS <profiling>on : -pg ;
+
+flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ;
+
+flags gfortran DEFINES <define> ;
+flags gfortran INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ;
diff --git a/jam-files/boost-build/tools/hp_cxx.jam b/jam-files/boost-build/tools/hp_cxx.jam
new file mode 100644
index 000000000..86cd783e2
--- /dev/null
+++ b/jam-files/boost-build/tools/hp_cxx.jam
@@ -0,0 +1,181 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2004, 2005 Markus Schoepflin.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# HP CXX compiler
+# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN
+#
+#
+# Notes on this toolset:
+#
+# - Because of very subtle issues with the default ansi mode, strict_ansi mode
+# is used for compilation. One example of things that don't work correctly in
+# the default ansi mode is overload resolution of function templates when
+# mixed with non-template functions.
+#
+# - For template instantiation "-timplicit_local" is used. Previously,
+# "-tlocal" has been tried to avoid the need for a template repository
+# but this doesn't work with manually instantiated templates. "-tweak"
+# has not been used to avoid the stream of warning messages issued by
+# ar or ld when creating a library or linking an application.
+#
+# - Debug symbols are generated with "-g3", as this works both in debug and
+# release mode. When compiling C++ code without optimization, we additionally
+# use "-gall", which generates full symbol table information for all classes,
+# structs, and unions. As this turns off optimization, it can't be used when
+# optimization is needed.
+#
+
+import feature generators common ;
+import toolset : flags ;
+
+feature.extend toolset : hp_cxx ;
+feature.extend c++abi : cxxarm ;
+
+# Inherit from Unix toolset to get library ordering magic.
+toolset.inherit hp_cxx : unix ;
+
+generators.override hp_cxx.prebuilt : builtin.lib-generator ;
+generators.override hp_cxx.prebuilt : builtin.prebuilt ;
+generators.override hp_cxx.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ;
+
+ local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ;
+
+ if $(command)
+ {
+ local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+
+ if $(root)
+ {
+ flags hp_cxx .root $(condition) : "\"$(root)\"/" ;
+ }
+ }
+ # If we can't find 'cxx' anyway, at least show 'cxx' in the commands
+ command ?= cxx ;
+
+ common.handle-options hp_cxx : $(condition) : $(command) : $(options) ;
+}
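+
+# Example (hypothetical user-config.jam entry; version and command are
+# placeholders):
+#
+#   using hp_cxx : 65 : cxx ;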
+
+generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ;
+generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ;
+
+
+
+# No static linking as far as I can tell.
+# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
+flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ;
+flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ;
+flags hp_cxx.link OPTIONS <debug-symbols>on : -g ;
+flags hp_cxx.link OPTIONS <debug-symbols>off : -s ;
+
+flags hp_cxx.compile OPTIONS <optimization>off : -O0 ;
+flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ;
+flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ;
+
+# This (undocumented) macro needs to be defined to get all C function
+# overloads required by the C++ standard.
+flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ;
+
+# Added for threading support
+flags hp_cxx.compile OPTIONS <threading>multi : -pthread ;
+flags hp_cxx.link OPTIONS <threading>multi : -pthread ;
+
+flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ;
+flags hp_cxx.compile OPTIONS <optimization>space : -O1 ;
+flags hp_cxx.compile OPTIONS <inlining>off : -inline none ;
+
+# The compiler versions tried (up to V6.5-040) hang when compiling Boost code
+# with full inlining enabled. So leave it at the default level for now.
+#
+# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ;
+
+flags hp_cxx.compile OPTIONS <profiling>on : -pg ;
+flags hp_cxx.link OPTIONS <profiling>on : -pg ;
+
+# Selection of the object model. This flag is needed on both the C++ compiler
+# and linker command line.
+
+# An unspecified ABI translates to '-model ansi', as that is the most
+# standard-conforming mode.
+flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ;
+flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ;
+flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ;
+flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ;
+
+# Display a descriptive tag together with each compiler message. This tag can
+# be used by the user to explicitly suppress the compiler message.
+flags hp_cxx.compile OPTIONS : -msg_display_tag ;
+
+flags hp_cxx.compile OPTIONS <cflags> ;
+flags hp_cxx.compile.c++ OPTIONS <cxxflags> ;
+flags hp_cxx.compile DEFINES <define> ;
+flags hp_cxx.compile INCLUDES <include> ;
+flags hp_cxx.link OPTIONS <linkflags> ;
+
+flags hp_cxx.link LIBPATH <library-path> ;
+flags hp_cxx.link LIBRARIES <library-file> ;
+flags hp_cxx.link FINDLIBS-ST <find-static-library> ;
+flags hp_cxx.link FINDLIBS-SA <find-shared-library> ;
+
+flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm
+}
+
+# When creating dynamic libraries, we don't want to be warned about unresolved
+# symbols, therefore all unresolved symbols are marked as expected by
+# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
+# chain.
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm
+}
+
+
+# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
+# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
+# is the default, no special flag is needed.
+actions compile.c
+{
+ $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Note: The compiler is forced to compile the files as C++ (-x cxx) because
+# otherwise it will silently ignore files with no file extension.
+#
+# Note: We deliberately don't suppress any warnings on the compiler command
+# line, the user can always do this in a customized toolset later on.
+
+rule compile.c++
+{
+ # We preprocess the TEMPLATE_DEPTH command line option here because we found
+ # no way to do it correctly in the actual action code. There we either get
+ # the -pending_instantiations parameter when no c++-template-depth property
+ # has been specified or we get additional quotes around
+ # "-pending_instantiations ".
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ;
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Always create the archive from scratch. See the gcc toolset for the rationale.
+RM = [ common.rm-command ] ;
+actions together piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc $(<) $(>)
+}
diff --git a/jam-files/boost-build/tools/hpfortran.jam b/jam-files/boost-build/tools/hpfortran.jam
new file mode 100644
index 000000000..96e8d18b5
--- /dev/null
+++ b/jam-files/boost-build/tools/hpfortran.jam
@@ -0,0 +1,35 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags hpfortran OPTIONS <optimization>off : -O0 ;
+flags hpfortran OPTIONS <optimization>speed : -O3 ;
+flags hpfortran OPTIONS <optimization>space : -O1 ;
+
+flags hpfortran OPTIONS <debug-symbols>on : -g ;
+flags hpfortran OPTIONS <profiling>on : -pg ;
+
+flags hpfortran DEFINES <define> ;
+flags hpfortran INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/ifort.jam b/jam-files/boost-build/tools/ifort.jam
new file mode 100644
index 000000000..eb7c19881
--- /dev/null
+++ b/jam-files/boost-build/tools/ifort.jam
@@ -0,0 +1,44 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags ifort OPTIONS <fflags> ;
+
+flags ifort OPTIONS <optimization>off : /Od ;
+flags ifort OPTIONS <optimization>speed : /O3 ;
+flags ifort OPTIONS <optimization>space : /O1 ;
+
+flags ifort OPTIONS <debug-symbols>on : /debug:full ;
+flags ifort OPTIONS <profiling>on : /Qprof_gen ;
+
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+flags ifort DEFINES <define> ;
+flags ifort INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/intel-darwin.jam b/jam-files/boost-build/tools/intel-darwin.jam
new file mode 100644
index 000000000..aa0fd8fb6
--- /dev/null
+++ b/jam-files/boost-build/tools/intel-darwin.jam
@@ -0,0 +1,220 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import intel ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset intel : platform : darwin ;
+
+toolset.inherit-generators intel-darwin
+ <toolset>intel <toolset-intel:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override intel-darwin.prebuilt : builtin.lib-generator ;
+generators.override intel-darwin.prebuilt : builtin.prebuilt ;
+generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules intel-darwin : gcc ;
+toolset.inherit-flags intel-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# vectorization diagnostics
+feature vectorize : off on full ;
+
+# Initializes the intel-darwin toolset.
+# version is mandatory.
+# name (default icc) is used to invoke the specified intel compiler.
+# compile and link options allow you to specify additional command line options for each version.
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-darwin
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command intel-darwin : icc
+ : $(command) : /opt/intel_cc_80/bin ] ;
+
+ common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags intel-darwin darwin $(condition) ;
+
+ # handle <library-path>
+ # local library-path = [ feature.get-values <library-path> : $(options) ] ;
+ # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+            # $(root)/lib/architecture-name (11.0 and later)
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
+ }
+ flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+
+ local m = [ MATCH (..).* : $(version) ] ;
+ local n = [ MATCH (.)\\. : $(m) ] ;
+ if $(n) {
+ m = $(n) ;
+ }
+
+ local major = $(m) ;
+
+ if $(major) = "9" {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
+ }
+ else {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
+ }
+
+ local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
+
+ # wchar_t char_traits workaround for compilers older than 10.2
+ if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
+ flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
+ }
+}
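+
+# Example (hypothetical user-config.jam entry; version and compiler path
+# are placeholders):
+#
+#   using intel-darwin : 11.0 : /opt/intel/bin/icc ;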
+
+SPACE = " " ;
+
+flags intel-darwin.compile OPTIONS <cflags> ;
+flags intel-darwin.compile OPTIONS <cxxflags> ;
+# flags intel-darwin.compile INCLUDES <include> ;
+
+flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
+
+#
+cpu-type-em64t = prescott nocona ;
+flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ;
+flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ;
+
+flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
+
+flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags intel-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of intel-linux
+# that inherits from gcc, but does not have the same
+# logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble
+# as on Linux, 'ar' is always available.
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here's rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "ranlib" -cs "$(<)"
+}
+
+flags intel-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/jam-files/boost-build/tools/intel-linux.jam b/jam-files/boost-build/tools/intel-linux.jam
new file mode 100644
index 000000000..d9164add8
--- /dev/null
+++ b/jam-files/boost-build/tools/intel-linux.jam
@@ -0,0 +1,250 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2011 Bryce Lelbach
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import intel ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset intel : platform : linux ;
+
+toolset.inherit-generators intel-linux
+ <toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+generators.override intel-linux.prebuilt : builtin.lib-generator ;
+generators.override intel-linux.prebuilt : builtin.prebuilt ;
+generators.override intel-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ;
+
+toolset.inherit-rules intel-linux : gcc ;
+toolset.inherit-flags intel-linux : gcc
+ : <inlining>off <inlining>on <inlining>full
+ <optimization>space <optimization>speed
+ <warnings>off <warnings>all <warnings>on
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Initializes the intel-linux toolset.
+# version is mandatory.
+# name (default icpc) is used to invoke the specified intel-linux compiler.
+# compile and link options allow you to specify additional command line options for each version.
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-linux
+ : version $(version) ] ;
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice: intel-linux version is" $(version) ;
+ }
+
+ local default_path ;
+
+ # Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0,
+ # aka intel-linux-12.0. In this version, Intel thankfully decides to install
+ # to a sane 'intel' folder in /opt.
+ if [ MATCH "(12[.]0|12)" : $(version) ]
+ { default_path = /opt/intel/bin ; }
+ # Intel C++ Compiler 11.1.
+ else if [ MATCH "(11[.]1)" : $(version) ]
+ { default_path = /opt/intel_cce_11.1.064.x86_64/bin ; }
+ # Intel C++ Compiler 11.0.
+ else if [ MATCH "(11[.]0|11)" : $(version) ]
+ { default_path = /opt/intel_cce_11.0.074.x86_64/bin ; }
+ # Intel C++ Compiler 10.1.
+ else if [ MATCH "(10[.]1)" : $(version) ]
+ { default_path = /opt/intel_cce_10.1.013_x64/bin ; }
+ # Intel C++ Compiler 9.1.
+ else if [ MATCH "(9[.]1)" : $(version) ]
+ { default_path = /opt/intel_cc_91/bin ; }
+ # Intel C++ Compiler 9.0.
+ else if [ MATCH "(9[.]0|9)" : $(version) ]
+ { default_path = /opt/intel_cc_90/bin ; }
+ # Intel C++ Compiler 8.1.
+ else if [ MATCH "(8[.]1)" : $(version) ]
+ { default_path = /opt/intel_cc_81/bin ; }
+ # Intel C++ Compiler 8.0 - this used to be the default, so now it's the
+ # fallback.
+ else
+ { default_path = /opt/intel_cc_80/bin ; }
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice: default search path for intel-linux is" $(default_path) ;
+ }
+
+ command = [ common.get-invocation-command intel-linux : icpc
+ : $(command) : $(default_path) ] ;
+
+ common.handle-options intel-linux : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags intel-linux gnu $(condition) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ local command-string = $(command:J=" ") ;
+ local version-output = [ SHELL "$(command-string) --version" ] ;
+ local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ;
+ local major = [ MATCH "([0-9]+).*" : $(real-version) ] ;
+
+ # If we failed to determine major version, use the behaviour for
+ # the current compiler.
+ if $(major) && [ numbers.less $(major) 10 ]
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+ else if $(major) && [ numbers.less $(major) 11 ]
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+ else # newer version of intel do have -Os (at least 11+, don't know about 10)
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+            # $(root)/lib/architecture-name (11.0 and later)
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
+ }
+ flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+}
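+
+# Example (hypothetical user-config.jam entry; with no command given, the
+# version-specific default paths above are searched for 'icpc'):
+#
+#   using intel-linux : 11.1 ;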
+
+SPACE = " " ;
+
+flags intel-linux.compile OPTIONS <warnings>off : -w0 ;
+flags intel-linux.compile OPTIONS <warnings>on : -w1 ;
+flags intel-linux.compile OPTIONS <warnings>all : -w2 ;
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+#
+# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler
+# won't over-write an existing pch: instead it creates filename$1.pchi, filename$2.pchi
+# etc - which appear not to do anything except take up disk space :-(
+#
+actions compile.c++.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
+}
+
+# Differ from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+
+
diff --git a/jam-files/boost-build/tools/intel-win.jam b/jam-files/boost-build/tools/intel-win.jam
new file mode 100644
index 000000000..691b5dce9
--- /dev/null
+++ b/jam-files/boost-build/tools/intel-win.jam
@@ -0,0 +1,184 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Importing common is needed because the rules we inherit here depend on it.
+# That is nasty.
+import common ;
+import errors ;
+import feature ;
+import intel ;
+import msvc ;
+import os ;
+import toolset ;
+import generators ;
+import type ;
+
+feature.extend-subfeature toolset intel : platform : win ;
+
+toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
+toolset.inherit-flags intel-win : msvc : : YLOPTION ;
+toolset.inherit-rules intel-win : msvc ;
+
+# Override default do-nothing generators.
+generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
+generators.override intel-win.compile.rc : rc.compile.resource ;
+generators.override intel-win.compile.mc : mc.compile ;
+
+toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
+
+toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
+
+# Initializes the intel toolset for windows
+rule init ( version ? : # the compiler version
+ command * : # the command to invoke the compiler itself
+ options * # Additional option: <compatibility>
+ # either 'vc6', 'vc7', 'vc7.1'
+ # or 'native'(default).
+ )
+{
+ local compatibility =
+ [ feature.get-values <compatibility> : $(options) ] ;
+ local condition = [ common.check-init-parameters intel-win
+ : version $(version) : compatibility $(compatibility) ] ;
+
+ command = [ common.get-invocation-command intel-win : icl.exe :
+ $(command) ] ;
+
+ common.handle-options intel-win : $(condition) : $(command) : $(options) ;
+
+ local root ;
+ if $(command)
+ {
+ root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root = $(root)/ ;
+ }
+
+ local setup ;
+ setup = [ GLOB $(root) : iclvars_*.bat ] ;
+ if ! $(setup)
+ {
+ setup = $(root)/iclvars.bat ;
+ }
+ setup = "call \""$(setup)"\" > nul " ;
+
+ if [ os.name ] = NT
+ {
+ setup = $(setup)"
+" ;
+ }
+ else
+ {
+ setup = "cmd /S /C "$(setup)" \"&&\" " ;
+ }
+
+ toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ;
+ toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ;
+ toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ;
+ toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ;
+ toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ;
+ toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ;
+
+ local m = [ MATCH (.).* : $(version) ] ;
+ local major = $(m[1]) ;
+
+ local C++FLAGS ;
+
+ C++FLAGS += /nologo ;
+
+ # Reduce the number of spurious error messages
+ C++FLAGS += /Qwn5 /Qwd985 ;
+
+ # Enable ADL
+ C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too
+
+ # Disable Microsoft "secure" overloads in Dinkumware libraries since they
+ # cause compile errors with Intel versions 9 and 10.
+ C++FLAGS += -D_SECURE_SCL=0 ;
+
+ if $(major) > 5
+ {
+ C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping.
+ }
+
+ # Add options recognized only by intel7 and above.
+ if $(major) >= 7
+ {
+ C++FLAGS += /Qansi_alias ;
+ }
+
+ if $(compatibility) = vc6
+ {
+ C++FLAGS +=
+ # Emulate VC6
+ /Qvc6
+
+ # No wchar_t support in vc6 dinkum library. Furthermore, in vc6
+ # compatibility-mode, wchar_t is not a distinct type from unsigned
+ # short.
+ -DBOOST_NO_INTRINSIC_WCHAR_T
+ ;
+ }
+ else
+ {
+ if $(major) > 5
+ {
+ # Add support for wchar_t
+ C++FLAGS += /Zc:wchar_t
+ # Tell the dinkumware library about it.
+ -D_NATIVE_WCHAR_T_DEFINED
+ ;
+ }
+ }
+
+ if $(compatibility) && $(compatibility) != native
+ {
+ C++FLAGS += /Q$(base-vc) ;
+ }
+ else
+ {
+ C++FLAGS +=
+ -Qoption,cpp,--arg_dep_lookup
+ # The following options were intended to disable the Intel compiler's
+ # 'bug-emulation' mode, but were later reported to be causing ICE with
+ # Intel-Win 9.0. It is not yet clear which options can be safely used.
+ # -Qoption,cpp,--const_string_literals
+ # -Qoption,cpp,--new_for_init
+ # -Qoption,cpp,--no_implicit_typename
+ # -Qoption,cpp,--no_friend_injection
+ # -Qoption,cpp,--no_microsoft_bugs
+ ;
+ }
+
+ toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
+ # By default, when creating PCH, intel adds 'i' to the explicitly
+ # specified name of the PCH file. Of course, Boost.Build is not
+    # happy when the compiler does not produce the file it was asked for.
+ # The option below stops this behaviour.
+ toolset.flags intel-win CFLAGS : -Qpchi- ;
+
+ if ! $(compatibility)
+ {
+ # If there's no backend version, assume 7.1.
+ compatibility = vc7.1 ;
+ }
+
+ local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ;
+ if ! $(extract-version)
+ {
+ errors.user-error "Invalid value for compatibility option:"
+ $(compatibility) ;
+ }
+
+ # Depending on the settings, running of tests require some runtime DLLs.
+ toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
+
+ msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ;
+}
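+
+# Example (hypothetical user-config.jam entry; version and compatibility
+# value are placeholders):
+#
+#   using intel-win : 9.1 : icl.exe : <compatibility>vc7.1 ;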
+
+toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
+
+toolset.flags intel-win YLOPTION ;
+
diff --git a/jam-files/boost-build/tools/intel.jam b/jam-files/boost-build/tools/intel.jam
new file mode 100644
index 000000000..67038aa28
--- /dev/null
+++ b/jam-files/boost-build/tools/intel.jam
@@ -0,0 +1,34 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'intel' toolset. Depending on the current
+# system, it forwards to the 'intel-linux', 'intel-darwin' or
+# 'intel-win' modules.
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : intel ;
+feature.subfeature toolset intel : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = LINUX
+ {
+ toolset.using intel-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ toolset.using intel-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using intel-win :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
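+
+# Example (hypothetical user-config.jam entry; the version is a
+# placeholder). The request is forwarded to the platform-specific module
+# selected above:
+#
+#   using intel : 11.1 ;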
diff --git a/jam-files/boost-build/tools/lex.jam b/jam-files/boost-build/tools/lex.jam
new file mode 100644
index 000000000..75d641318
--- /dev/null
+++ b/jam-files/boost-build/tools/lex.jam
@@ -0,0 +1,33 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+import generators ;
+import feature ;
+import property ;
+
+
+feature.feature flex.prefix : : free ;
+type.register LEX : l ;
+type.register LEX++ : ll ;
+generators.register-standard lex.lex : LEX : C ;
+generators.register-standard lex.lex : LEX++ : CPP ;
+
+rule init ( )
+{
+}
+
+rule lex ( target : source : properties * )
+{
+ local r = [ property.select flex.prefix : $(properties) ] ;
+ if $(r)
+ {
+ PREFIX on $(<) = $(r:G=) ;
+ }
+}
+
+actions lex
+{
+ flex -P$(PREFIX) -o$(<) $(>)
+}
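+
+# Example (hypothetical Jamfile target; names are placeholders). The
+# <flex.prefix> value is passed to flex as -P to rename the generated
+# yy* symbols:
+#
+#   exe scanner : scanner.ll driver.cpp : <flex.prefix>my ;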
diff --git a/jam-files/boost-build/tools/make.jam b/jam-files/boost-build/tools/make.jam
new file mode 100644
index 000000000..085672857
--- /dev/null
+++ b/jam-files/boost-build/tools/make.jam
@@ -0,0 +1,72 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'make' main target rule.
+
+import "class" : new ;
+import errors : error ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import targets ;
+
+
+class make-target-class : basic-target
+{
+ import type regex virtual-target ;
+ import "class" : new ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local action-name = [ $(property-set).get <action> ] ;
+ # 'm' will always be set -- we add '@' ourselves in the 'make' rule
+ # below.
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+
+ local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
+ local t = [ new file-target $(self.name) exact : [ type.type
+ $(self.name) ] : $(self.project) : $(a) ] ;
+ return [ property-set.empty ] [ virtual-target.register $(t) ] ;
+ }
+}
+
+
+# Declares the 'make' main target.
+#
+rule make ( target-name : sources * : generating-rule + : requirements * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ # The '@' sign causes the feature.jam module to qualify rule name with the
+ # module name of current project, if needed.
+ local m = [ MATCH ^(@).* : $(generating-rule) ] ;
+ if ! $(m)
+ {
+ generating-rule = @$(generating-rule) ;
+ }
+ requirements += <action>$(generating-rule) ;
+
+ targets.main-target-alternative
+ [ new make-target-class $(target-name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ;
+}
+
+
+IMPORT $(__name__) : make : : make ;
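+
+# Example (hypothetical Jamfile; action and file names are placeholders):
+#
+#   actions copy-file
+#   {
+#       cp "$(>)" "$(<)"
+#   }
+#
+#   make settings.cfg : settings.cfg.in : @copy-file ;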
diff --git a/jam-files/boost-build/tools/mc.jam b/jam-files/boost-build/tools/mc.jam
new file mode 100644
index 000000000..578377735
--- /dev/null
+++ b/jam-files/boost-build/tools/mc.jam
@@ -0,0 +1,44 @@
+#~ Copyright 2005 Alexey Pakhunov.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Microsoft message compiler tool.
+# Notes:
+# - there is just the message compiler tool; there is no tool for
+#   extracting message strings from sources
+# - this file allows using the Microsoft message compiler
+#   with any toolset. In msvc.jam, there is a more specific
+#   message compiling action.
+
+import common ;
+import generators ;
+import feature : feature get-values ;
+import toolset : flags ;
+import type ;
+import rc ;
+
+rule init ( )
+{
+}
+
+type.register MC : mc ;
+
+
+# Command line options
+feature mc-input-encoding : ansi unicode : free ;
+feature mc-output-encoding : unicode ansi : free ;
+feature mc-set-customer-bit : no yes : free ;
+
+flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ;
+flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ;
+flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ;
+flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ;
+flags mc.compile MCFLAGS <mc-set-customer-bit>no : ;
+flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ;
+
+generators.register-standard mc.compile : MC : H RC ;
+
+actions compile
+{
+ mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
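+
+# Example (hypothetical target; names are placeholders). The .mc source
+# would be compiled into a header and a resource script, which are then
+# consumed by the rest of the toolchain:
+#
+#   exe event_logger : logger.cpp messages.mc ;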
diff --git a/jam-files/boost-build/tools/message.jam b/jam-files/boost-build/tools/message.jam
new file mode 100644
index 000000000..212d8542c
--- /dev/null
+++ b/jam-files/boost-build/tools/message.jam
@@ -0,0 +1,55 @@
+# Copyright 2008 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target type 'message', which prints a message when built
+# for the first time.
+
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+
+class message-target-class : basic-target
+{
+ rule __init__ ( name-and-dir : project : * )
+ {
+ basic-target.__init__ $(name-and-dir) : $(project) ;
+ self.3 = $(3) ;
+ self.4 = $(4) ;
+ self.5 = $(5) ;
+ self.6 = $(6) ;
+ self.7 = $(7) ;
+ self.8 = $(8) ;
+ self.9 = $(9) ;
+ self.built = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ if ! $(self.built)
+ {
+ for i in 3 4 5 6 7 8 9
+ {
+ if $(self.$(i))
+ {
+ ECHO $(self.$(i)) ;
+ }
+ }
+ self.built = 1 ;
+ }
+
+ return [ property-set.empty ] ;
+ }
+}
+
+
+rule message ( name : * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new message-target-class $(name) : $(project)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ;
+}
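+
+# Example (hypothetical Jamfile target; names are placeholders):
+#
+#   message my-notice : "Configuring without optional components." ;
+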
+IMPORT $(__name__) : message : : message ;
\ No newline at end of file
diff --git a/jam-files/boost-build/tools/midl.jam b/jam-files/boost-build/tools/midl.jam
new file mode 100644
index 000000000..0aa5dda31
--- /dev/null
+++ b/jam-files/boost-build/tools/midl.jam
@@ -0,0 +1,142 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Microsoft Interface Definition Language (MIDL) related routines
+
+import common ;
+import generators ;
+import feature : feature get-values ;
+import os ;
+import scanner ;
+import toolset : flags ;
+import type ;
+
+rule init ( )
+{
+}
+
+type.register IDL : idl ;
+
+# A type library (.tlb) is generated by the MIDL compiler and can be included
+# in the resources of an application (.rc). In order to be found by the
+# resource compiler, its target type should be derived from 'H' - otherwise
+# the property '<implicit-dependency>' will be ignored.
+type.register MSTYPELIB : tlb : H ;
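+
+# Example (hypothetical targets; names are placeholders). The type library
+# produced from the IDL source can be picked up by an application's
+# resource script via <implicit-dependency>:
+#
+#   lib my_com_server : server.cpp my_interfaces.idl ;
+#   exe my_app : app.cpp app.rc : <implicit-dependency>my_com_server ;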
+
+
+# Register scanner for MIDL files
+class midl-scanner : scanner
+{
+ import path property-set regex scanner type virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ self.includes = $(includes) ;
+
+ # List of quoted strings
+ self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
+
+ # 'import' and 'importlib' directives
+ self.re-import = "import"$(self.re-strings)"[ \t]*;" ;
+ self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ;
+
+ # C preprocessor 'include' directive
+ self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ;
+ self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
+ }
+
+ rule pattern ( )
+ {
+ # Match '#include', 'import' and 'importlib' directives
+ return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ;
+ local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ;
+ local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ;
+ local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ;
+
+ # CONSIDER: the new scoping rule seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+        # Attach the binding of the including file to the included targets.
+        # When a target is created directly from a virtual target this extra
+        # information is unnecessary. But in other cases, it allows
+        # distinguishing between two headers of the same name included from
+        # different places.
+ local g2 = $(g)"#"$(b) ;
+
+ included-angle = $(included-angle:G=$(g)) ;
+ included-quoted = $(included-quoted:G=$(g2)) ;
+ imported = $(imported:G=$(g2)) ;
+ imported_tlbs = $(imported_tlbs:G=$(g2)) ;
+
+ local all = $(included-angle) $(included-quoted) $(imported) ;
+
+ INCLUDES $(target) : $(all) ;
+ DEPENDS $(target) : $(imported_tlbs) ;
+ NOCARE $(all) $(imported_tlbs) ;
+ SEARCH on $(included-angle) = $(self.includes:G=) ;
+ SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ;
+ SEARCH on $(imported) = $(b) $(self.includes:G=) ;
+ SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ;
+
+ scanner.propagate
+ [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] :
+ $(included-angle) $(included-quoted) : $(target) ;
+
+ scanner.propagate $(__name__) : $(imported) : $(target) ;
+ }
+}
+
+scanner.register midl-scanner : include ;
+type.set-scanner IDL : midl-scanner ;
+
+
+# Command line options
+feature midl-stubless-proxy : yes no : propagated ;
+feature midl-robust : yes no : propagated ;
+
+flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ;
+flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ;
+flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ;
+flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ;
+
+# Architecture-specific options
+architecture-x86 = <architecture> <architecture>x86 ;
+address-model-32 = <address-model> <address-model>32 ;
+address-model-64 = <address-model> <address-model>64 ;
+
+flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ;
+flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ;
+flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ;
+
+
+flags midl.compile.idl DEFINES <define> ;
+flags midl.compile.idl UNDEFS <undef> ;
+flags midl.compile.idl INCLUDES <include> ;
+
+
+generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ;
+
+
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# depends on the contents of the source IDL file. Calling TOUCH_FILE below ensures
+# that both files will be created so bjam will not try to recreate them
+# constantly.
+TOUCH_FILE = [ common.file-touch-command ] ;
+
+actions compile.idl
+{
+ midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")"
+ $(TOUCH_FILE) "$(<[4]:W)"
+ $(TOUCH_FILE) "$(<[5]:W)"
+}
diff --git a/jam-files/boost-build/tools/mipspro.jam b/jam-files/boost-build/tools/mipspro.jam
new file mode 100644
index 000000000..417eaefcf
--- /dev/null
+++ b/jam-files/boost-build/tools/mipspro.jam
@@ -0,0 +1,145 @@
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+
+feature.extend toolset : mipspro ;
+toolset.inherit mipspro : unix ;
+generators.override mipspro.prebuilt : builtin.lib-generator ;
+generators.override mipspro.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.sgi.com/products/software/irix/tools/
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters mipspro : version $(version) ] ;
+
+ command = [ common.get-invocation-command mipspro : CC : $(command) ] ;
+
+ common.handle-options mipspro : $(condition) : $(command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=cc) ;
+
+ toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local command = [
+ common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ;
+
+ command_f = $(command_f[1--2]) $(command[-1]:B=f77) ;
+ toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ;
+
+ # set link flags
+ flags mipspro.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+
+ flags mipspro.link FINDLIBS-SA : [
+ feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+}
+
+# Declare generators
+generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ;
+generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ;
+generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ;
+
+cpu-arch-32 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32 ;
+
+cpu-arch-64 =
+ <architecture>/<address-model>64 ;
+
+flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ;
+flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ;
+
+# Declare flags and actions for compilation
+flags mipspro.compile OPTIONS <debug-symbols>on : -g ;
+# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags mipspro.compile OPTIONS <warnings>off : -w ;
+flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress the "long long is nonstandard" warning
+flags mipspro.compile OPTIONS <warnings>all : -fullwarn ;
+flags mipspro.compile OPTIONS <optimization>speed : -Ofast ;
+flags mipspro.compile OPTIONS <optimization>space : -O2 ;
+flags mipspro.compile OPTIONS <cflags> : -LANG:std ;
+flags mipspro.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
+flags mipspro.compile.c++ OPTIONS <cxxflags> ;
+flags mipspro.compile DEFINES <define> ;
+flags mipspro.compile INCLUDES <include> ;
+
+
+flags mipspro.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags mipspro.link OPTIONS <debug-symbols>on : -g ;
+# Strip the binary when no debugging is needed
+# flags mipspro.link OPTIONS <debug-symbols>off : -s ;
+# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ;
+# flags mipspro.link OPTIONS <threading>multi : -mt ;
+
+flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ;
+flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ;
+
+flags mipspro.link OPTIONS <optimization>speed : -Ofast ;
+flags mipspro.link OPTIONS <optimization>space : -O2 ;
+flags mipspro.link OPTIONS <linkflags> ;
+flags mipspro.link LINKPATH <library-path> ;
+flags mipspro.link FINDLIBS-ST <find-static-library> ;
+flags mipspro.link FINDLIBS-SA <find-shared-library> ;
+flags mipspro.link FINDLIBS-SA <threading>multi : pthread ;
+flags mipspro.link LIBRARIES <library-file> ;
+flags mipspro.link LINK-RUNTIME <runtime-link>static : static ;
+flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags mipspro.link RPATH <dll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ ar -cr "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/mpi.jam b/jam-files/boost-build/tools/mpi.jam
new file mode 100644
index 000000000..0fe490bec
--- /dev/null
+++ b/jam-files/boost-build/tools/mpi.jam
@@ -0,0 +1,583 @@
+# Support for the Message Passing Interface (MPI)
+#
+# (C) Copyright 2005, 2006 Trustees of Indiana University
+# (C) Copyright 2005 Douglas Gregor
+#
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
+#
+# Authors: Douglas Gregor
+# Andrew Lumsdaine
+#
+# ==== MPI Configuration ====
+#
+# For many users, MPI support can be enabled simply by adding the following
+# line to your user-config.jam file:
+#
+# using mpi ;
+#
+# This should auto-detect MPI settings based on the MPI wrapper compiler in
+# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
+# has a different name, you can pass the name of the wrapper compiler as the
+# first argument to the mpi module:
+#
+# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
+#
+# If your MPI implementation does not have a wrapper compiler, or the MPI
+# auto-detection code does not work with your MPI's wrapper compiler,
+# you can pass MPI-related options explicitly via the second parameter to the
+# mpi module:
+#
+# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
+# <find-shared-library>mpi <find-shared-library>lam
+# <find-shared-library>dl ;
+#
+# To see the results of MPI auto-detection, pass "--debug-configuration" on
+# the bjam command line.
+#
+# The (optional) third argument configures Boost.MPI for running
+# regression tests. These parameters specify the executable used to
+# launch jobs (default: "mpirun"), followed by any arguments it needs,
+# and the flag that tells it how many processes to start
+# (default: "-np"). With the default parameters,
+# for instance, the test harness will execute, e.g.,
+#
+# mpirun -np 4 all_gather_test
+#
+# ==== Linking Against the MPI Libraries ====
+#
+# To link against the MPI libraries, import the "mpi" module and add the
+# following requirement to your target:
+#
+# <library>/mpi//mpi
+#
+# Since MPI support is not always available, you should check
+# "mpi.configured" before trying to link against the MPI libraries.
+
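
As a rough, non-authoritative sketch of the two usage patterns described above, a project might configure and consume this module along the following lines (the compiler path, target and source names are assumptions, not part of this patch):

# user-config.jam -- hypothetical wrapper-compiler location
using mpi : /opt/openmpi/bin/mpic++ ;

# Jamfile -- hypothetical target, built only when MPI auto-detection succeeded
import mpi ;
if [ mpi.configured ]
{
    exe hello_mpi : hello_mpi.cpp : <library>/mpi//mpi ;
}
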
+import "class" : new ;
+import common ;
+import feature : feature ;
+import generators ;
+import os ;
+import project ;
+import property ;
+import testing ;
+import toolset ;
+import type ;
+import path ;
+
+# Make this module a project
+project.initialize $(__name__) ;
+project mpi ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Assuming the first part of the command line is the given prefix
+# followed by some non-empty value, remove the first argument. Returns
+# either nothing (if there was no prefix or no value) or a pair
+#
+# <name>value rest-of-cmdline
+#
+# This is a subroutine of cmdline_to_features
+rule add_feature ( prefix name cmdline )
+{
+ local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+
+ # If there was no value associated with the prefix, abort
+ if ! $(match) {
+ return ;
+ }
+
+ local value = $(match[1]) ;
+
+ if [ MATCH " +" : $(value) ] {
+ value = "\"$(value)\"" ;
+ }
+
+ return "<$(name)>$(value)" $(match[2]) ;
+}
+
+# Strip any end-of-line characters off the given string and return the
+# result.
+rule strip-eol ( string )
+{
+ local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
+
+ if $(match)
+ {
+ return $(match[1]) ;
+ }
+ else
+ {
+ return $(string) ;
+ }
+}
+
+# Split a command line into a set of features. Certain kinds of
+# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
+# with their Boost.Build equivalents (e.g., <include>, <define>,
+# <library-path>, <find-shared-library>). All other arguments are
+# introduced using the features in the unknown-features parameter,
+# because we do not know how to deal with them. The incoming command
+# line should be a string starting with an executable (e.g.,
+# "g++ -I/include/path") and may contain any number of command-line
+# arguments thereafter. The result is a list of features corresponding
+# to the given command line, ignoring the executable.
+rule cmdline_to_features ( cmdline : unknown-features ? )
+{
+ local executable ;
+ local features ;
+ local otherflags ;
+ local result ;
+
+ unknown-features ?= <cxxflags> <linkflags> ;
+
+ # Pull the executable out of the command line. At this point, the
+ # executable is just thrown away.
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ executable = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # List the prefix/feature pairs that we will be able to transform.
+ # Every kind of parameter not mentioned here will be placed in both
+ # cxxflags and linkflags, because we don't know where they should go.
+ local feature_kinds-D = "define" ;
+ local feature_kinds-I = "include" ;
+ local feature_kinds-L = "library-path" ;
+ local feature_kinds-l = "find-shared-library" ;
+
+ while $(cmdline) {
+
+ # Check for one of the feature prefixes we know about. If we
+ # find one (and the associated value is nonempty), convert it
+ # into a feature.
+ local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
+ local matched ;
+ if $(match) && $(match[2]) {
+ local prefix = $(match[1]) ;
+ if $(feature_kinds$(prefix)) {
+ local name = $(feature_kinds$(prefix)) ;
+ local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
+
+ if $(add) {
+
+ if $(add[1]) = <find-shared-library>pthread
+ {
+ # Uhm. It's not really nice that this MPI implementation
+ # uses -lpthread as opposed to -pthread. We do want to
+ # set <threading>multi, instead of -lpthread.
+ result += "<threading>multi" ;
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else
+ {
+ result += $(add[1]) ;
+ }
+
+ cmdline = $(add[2]) ;
+ matched = yes ;
+ }
+ }
+ }
+
+ # If we haven't matched a feature prefix, just grab the command-line
+ # argument itself. If we can map this argument to a feature
+ # (e.g., -pthread -> <threading>multi), then do so; otherwise,
+ # add it to the list of "other" flags that we don't
+ # understand.
+ if ! $(matched) {
+ match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ local value = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # Check for multithreading support
+ if $(value) = "-pthread" || $(value) = "-pthreads"
+ {
+ result += "<threading>multi" ;
+
+ # DPG: This is a hack intended to work around a BBv2 bug where
+ # requirements propagated from libraries are not checked for
+ # conflicts when BBv2 determines which "common" properties to
+ # apply to a target. In our case, the <threading>single property
+ # gets propagated from the common properties to Boost.MPI
+ # targets, even though <threading>multi is in the usage
+ # requirements of <library>/mpi//mpi.
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
+ otherflags += $(value) ;
+ }
+ }
+ }
+
+ # If there are other flags that we don't understand, add them to the
+ # result as both <cxxflags> and <linkflags>
+ if $(otherflags) {
+ for unknown in $(unknown-features)
+ {
+ result += "$(unknown)$(otherflags:J= )" ;
+ }
+ }
+
+ return $(result) ;
+}
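
As a worked example of the mapping above (the flags are assumptions, not taken from any particular MPI distribution), a wrapper command line would be translated roughly as follows when evaluated inside this module:

# Hypothetical input and its approximate translation:
local features = [ cmdline_to_features "mpic++ -I/usr/include/mpi -L/usr/lib -lmpi -pthread -O2" ] ;
ECHO $(features) ;
# expected roughly:
#   <include>/usr/include/mpi <library-path>/usr/lib <find-shared-library>mpi
#   <threading>multi <cxxflags>-O2 <linkflags>-O2
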
+
+# Determine if it is safe to execute the given shell command by trying
+# to execute it and determining whether the exit code is zero or
+# not. Returns true for an exit code of zero, false otherwise.
+local rule safe-shell-command ( cmdline )
+{
+ local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
+ return [ MATCH ".*(SSCOK).*" : $(result) ] ;
+}
+
+# Initialize the MPI module.
+rule init ( mpicxx ? : options * : mpirun-with-options * )
+{
+ if ! $(options) && $(.debug-configuration)
+ {
+ ECHO "===============MPI Auto-configuration===============" ;
+ }
+
+ if ! $(mpicxx) && [ os.on-windows ]
+ {
+ # Try to auto-configure to the Microsoft Compute Cluster Pack
+ local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
+ local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
+ if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
+ }
+
+ # Pick up either the 32-bit or 64-bit library, depending on which address
+ # model the user has selected. Default to 32-bit.
+ options = <include>$(cluster_pack_path)/Include
+ <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
+ <library-path>$(cluster_pack_path)/Lib/i386
+ <find-static-library>msmpi
+ <toolset>msvc:<define>_SECURE_SCL=0
+ ;
+
+ # Setup the "mpirun" equivalent (mpiexec)
+ .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
+ .mpirun_flags = -n ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
+ }
+ }
+
+ if ! $(options)
+ {
+ # Try to auto-detect options based on the wrapper compiler
+ local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpiCC", which is used by MPICH
+ command = [ common.get-invocation-command mpi : mpiCC ] ;
+ }
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpicxx", which is used by OpenMPI and MPICH2
+ command = [ common.get-invocation-command mpi : mpicxx ] ;
+ }
+
+ local result ;
+ local compile_flags ;
+ local link_flags ;
+
+ if ! $(command)
+ {
+ # Do nothing: we'll complain later
+ }
+ # OpenMPI and newer versions of LAM-MPI have -showme:compile and
+ # -showme:link.
+ else if [ safe-shell-command "$(command) -showme:compile" ] &&
+ [ safe-shell-command "$(command) -showme:link" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ SHELL "$(command) -showme:compile" ] ;
+ link_flags = [ SHELL "$(command) -showme:link" ] ;
+
+ # Prepend COMPILER as the executable name, to match the format of
+ # other compilation commands.
+ compile_flags = "COMPILER $(compile_flags)" ;
+ link_flags = "COMPILER $(link_flags)" ;
+ }
+ # Look for LAM-MPI's -showme
+ else if [ safe-shell-command "$(command) -showme" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
+ }
+
+ result = [ SHELL "$(command) -showme" ] ;
+ }
+ # Look for MPICH
+ else if [ safe-shell-command "$(command) -show" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found MPICH wrapper compiler: $(command)" ;
+ }
+ compile_flags = [ SHELL "$(command) -compile_info" ] ;
+ link_flags = [ SHELL "$(command) -link_info" ] ;
+ }
+ # Sun HPC and IBM POE
+ else if [ SHELL "$(command) -v 2>/dev/null" ]
+ {
+ compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
+
+ local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
+ if $(back)
+ {
+ # Sun HPC
+ if $(.debug-configuration)
+ {
+ ECHO "Found Sun MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
+ compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
+ link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
+ link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
+
+ # strip -v and -xtarget=native64 out of the link options
+ local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
+ back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ }
+ else
+ {
+ # IBM POE
+ if $(.debug-configuration)
+ {
+ ECHO "Found IBM MPI wrapper compiler: $(command)" ;
+ }
+
+ #
+ compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
+ compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
+ local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
+ back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ link_flags = $(compile_flags) ;
+
+ # get location of mpif.h from mpxlf
+ local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
+ f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
+ front = [ MATCH "(.*)-v" : $(f_flags) ] ;
+ back = [ MATCH "-v(.*)" : $(f_flags) ] ;
+ f_flags = "$(front) $(back)" ;
+ f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
+ f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ compile_flags = "$(compile_flags) $(f_flags)" ;
+ }
+ }
+
+ if $(result) || $(compile_flags) && $(link_flags)
+ {
+ if $(result)
+ {
+ result = [ strip-eol $(result) ] ;
+ options = [ cmdline_to_features $(result) ] ;
+ }
+ else
+ {
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ link_flags = [ strip-eol $(link_flags) ] ;
+
+ # Separately process compilation and link features, then combine
+ # them at the end.
+ local compile_features = [ cmdline_to_features $(compile_flags)
+ : "<cxxflags>" ] ;
+ local link_features = [ cmdline_to_features $(link_flags)
+ : "<linkflags>" ] ;
+ options = $(compile_features) $(link_features) ;
+ }
+
+ # If requested, display MPI configuration information.
+ if $(.debug-configuration)
+ {
+ if $(result)
+ {
+ ECHO " Wrapper compiler command line: $(result)" ;
+ }
+ else
+ {
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(compile_flags) ] ;
+ ECHO "MPI compilation flags: $(match[2])" ;
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(link_flags) ] ;
+ ECHO "MPI link flags: $(match[2])" ;
+ }
+ }
+ }
+ else
+ {
+ if $(command)
+ {
+ ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
+ ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
+ }
+ else if $(mpicxx)
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
+ }
+ else
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
+ }
+ ECHO "You will need to manually configure MPI support." ;
+ }
+
+ }
+
+ # Find mpirun (or its equivalent) and its flags
+ if ! $(.mpirun)
+ {
+ .mpirun =
+ [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
+ .mpirun_flags = $(mpirun-with-options[2-]) ;
+ .mpirun_flags ?= -np ;
+ }
+
+ if $(.debug-configuration)
+ {
+ if $(options)
+ {
+ echo "MPI build features: " ;
+ ECHO $(options) ;
+ }
+
+ if $(.mpirun)
+ {
+ echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
+ }
+
+ ECHO "====================================================" ;
+ }
+
+ if $(options)
+ {
+ .configured = true ;
+
+ # Set up the "mpi" alias
+ alias mpi : : : : $(options) ;
+ }
+}
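
A minimal sketch of overriding the test launcher handled at the end of init (the mpiexec path is an assumption):

# user-config.jam -- hypothetical entry; the third argument becomes
# .mpirun and .mpirun_flags above.
using mpi : : : "/opt/openmpi/bin/mpiexec" -n ;
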
+
+# States whether MPI has been configured.
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+# Returns the "extra" requirements needed to build MPI. These requirements are
+# part of the /mpi//mpi library target, but they need to be added to anything
+# that uses MPI directly to work around bugs in BBv2's propagation of
+# requirements.
+rule extra-requirements ( )
+{
+ return $(MPI_EXTRA_REQUIREMENTS) ;
+}
+
+# Support for testing; borrowed from Python
+type.register RUN_MPI_OUTPUT ;
+type.register RUN_MPI : : TEST ;
+
+class mpi-test-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ # Generate an executable from the sources. This is the executable we will run.
+ local executable =
+ [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
+
+ result =
+ [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
+ }
+}
+
+# Use mpi-test-generator to generate MPI tests from sources
+generators.register
+ [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_MPI_OUTPUT : RUN_MPI ;
+
+# The number of processes to spawn when executing an MPI test.
+feature mpi:processes : : free incidental ;
+
+# The flag settings on testing.capture-output do not
+# apply to mpi.capture-output at the moment.
+# Redo this explicitly.
+toolset.flags mpi.capture-output ARGS <testing.arg> ;
+rule capture-output ( target : sources * : properties * )
+{
+ # Use the standard capture-output rule to run the tests
+ testing.capture-output $(target) : $(sources[1]) : $(properties) ;
+
+ # Determine the number of processes we should run on.
+ local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
+ num_processes = $(num_processes:G=) ;
+
+ # serialize the MPI tests to avoid overloading systems
+ JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
+
+ # We launch MPI processes using the "mpirun" equivalent specified by the user.
+ LAUNCHER on $(target) =
+ [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
+}
+
+# Creates a set of test cases to be run through the MPI launcher. The name, sources,
+# and requirements are the same as for any other test generator. However, schedule is
+# a list of numbers, which indicates how many processes each test run will use. For
+# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
+# 7 processes. The name provided is just the base name: the actual tests will be
+# the name followed by a hyphen, then the number of processes.
+rule mpi-test ( name : sources * : requirements * : schedule * )
+{
+ sources ?= $(name).cpp ;
+ schedule ?= 1 2 3 4 7 8 13 17 ;
+
+ local result ;
+ for processes in $(schedule)
+ {
+ result += [ testing.make-test
+ run-mpi : $(sources) /boost/mpi//boost_mpi
+ : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
+ }
+ return $(result) ;
+}
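
A sketch of how a test Jamfile might drive this rule (the alias, test and source names are hypothetical):

# Hypothetical test Jamfile fragment:
import mpi : mpi-test ;

alias my-mpi-tests : [ mpi-test broadcast_test : broadcast_test.cpp : : 1 2 4 ] ;
# Declares broadcast_test-1, broadcast_test-2 and broadcast_test-4, each
# launched through the configured "mpirun -np <n>" equivalent.
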
diff --git a/jam-files/boost-build/tools/msvc-config.jam b/jam-files/boost-build/tools/msvc-config.jam
new file mode 100644
index 000000000..6c71e3b00
--- /dev/null
+++ b/jam-files/boost-build/tools/msvc-config.jam
@@ -0,0 +1,12 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the Visual Studio toolset. To use, just import this module.
+
+import toolset : using ;
+
+ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ;
+
+using msvc : all ;
+
diff --git a/jam-files/boost-build/tools/msvc.jam b/jam-files/boost-build/tools/msvc.jam
new file mode 100644
index 000000000..e33a66d22
--- /dev/null
+++ b/jam-files/boost-build/tools/msvc.jam
@@ -0,0 +1,1392 @@
+# Copyright (c) 2003 David Abrahams.
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2006 Bojan Resnik.
+# Copyright (c) 2006 Ilya Sokolov.
+# Copyright (c) 2007 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import mc ;
+import midl ;
+import os ;
+import path ;
+import pch ;
+import property ;
+import rc ;
+import toolset ;
+import type ;
+
+
+type.register MANIFEST : manifest ;
+feature.feature embed-manifest : on off : incidental propagated ;
+
+type.register PDB : pdb ;
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initialize a specific toolset version configuration. As a result, the path to
+# the compiler and, possibly, the program names are set up, and will be used when
+# that version of the compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be omitted:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+# - default - this is an equivalent to an empty version.
+#
+# Depending on the supplied version, the detected configurations and the presence
+# of 'cl.exe' in the path, different results may be achieved. The following table describes
+# the possible scenarios:
+#
+# Nothing "x.y"
+# Passed Nothing "x.y" detected, detected,
+# version detected detected cl.exe in path cl.exe in path
+#
+# default Error Use "x.y" Create "default" Use "x.y"
+# all None Use all None Use all
+# x.y - Use "x.y" - Use "x.y"
+# a.b Error Error Create "a.b" Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+#
+rule init (
+ # The msvc version being configured. When omitted the tools invoked when no
+ # explicit version is given will be configured.
+ version ?
+
+ # The command used to invoke the compiler. If not specified:
+ # - if version is given, default location for that version will be
+ # searched
+ #
+ # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0
+ # and 6.* will be searched
+ #
+ # - if compiler is not found in the default locations, PATH will be
+ # searched.
+ : command *
+
+ # Options may include:
+ #
+ # All options shared by multiple toolset types as handled by the
+ # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>,
+ # <fflags> & <linkflags>.
+ #
+ # <assembler>
+ # <compiler>
+ # <idl-compiler>
+ # <linker>
+ # <mc-compiler>
+ # <resource-compiler>
+ # Exact tool names to be used by this msvc toolset configuration.
+ #
+ # <compiler-filter>
+ # Command through which to pipe the output of running the compiler.
+ # For example to pass the output to STLfilt.
+ #
+ # <setup>
+ # Global setup command to invoke before running any of the msvc tools.
+ # It will be passed additional option parameters depending on the actual
+ # target platform.
+ #
+ # <setup-amd64>
+ # <setup-i386>
+ # <setup-ia64>
+ # Platform specific setup command to invoke before running any of the
+ #       msvc tools used when building a target for a specific platform, e.g.
+ # when building a 32 or 64 bit executable.
+ : options *
+)
+{
+ if $(command)
+ {
+ options += <command>$(command) ;
+ }
+ configure $(version) : $(options) ;
+}
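
For illustration, a couple of hypothetical user-config.jam entries exercising the parameters documented above (the install paths, setup script and filter command are assumptions):

# Rely on auto-detection, pin the version only:
using msvc : 9.0 ;

# Explicit compiler command plus a global setup script and an output filter:
using msvc : 8.0 : "C:/Program Files/Microsoft Visual Studio 8/VC/bin/cl.exe"
    : <setup>"C:/Program Files/Microsoft Visual Studio 8/VC/vcvarsall.bat"
      <compiler-filter>"perl STLfilt.pl" ;
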
+
+
+# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
+# a part of the 'options' list. See the 'init' rule comment for more detailed
+# information.
+#
+rule configure ( version ? : options * )
+{
+ switch $(version)
+ {
+ case "all" :
+ if $(options)
+ {
+ errors.error "MSVC toolset configuration: options should be"
+ "empty when '$(version)' is specified." ;
+ }
+
+ # Configure (i.e. mark as used) all registered versions.
+ local all-versions = [ $(.versions).all ] ;
+ if ! $(all-versions)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [msvc-cfg] Asked to configure all registered"
+ "msvc toolset versions when there are none currently"
+ "registered." ;
+ }
+ }
+ else
+ {
+ for local v in $(all-versions)
+ {
+ # Note that there is no need to skip already configured
+ # versions here as this will request configure-really rule
+ # to configure the version using default options which will
+ # in turn cause it to simply do nothing in case the version
+ # has already been configured.
+ configure-really $(v) ;
+ }
+ }
+
+ case "default" :
+ configure-really : $(options) ;
+
+ case * :
+ configure-really $(version) : $(options) ;
+ }
+}
+
+
+# Sets up flag definitions dependent on the compiler version used.
+# - 'version' is the version of compiler in N.M format.
+# - 'conditions' is the property set to be used as flag conditions.
+# - 'toolset' is the toolset for which flag settings are to be defined.
+# This makes the rule reusable for other msvc-option-compatible compilers.
+#
+rule configure-version-specific ( toolset : version : conditions )
+{
+ toolset.push-checking-for-flags-module unchecked ;
+ # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+ # /Zc:wchar_t options that improve C++ standard conformance, but those
+ # options are off by default. If we are sure that the msvc version is at
+ # least 7.*, add those options explicitly. We can be sure either if the user specified
+ # version 7.* explicitly or if we auto-detected the version ourselves.
+ if ! [ MATCH ^(6\\.) : $(version) ]
+ {
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ;
+ toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ;
+
+ # Explicitly disable the 'function is deprecated' warning. Some msvc
+ # versions have a bug, causing them to emit the deprecation warning even
+ # with /W0.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ;
+
+ if [ MATCH ^([78]\\.) : $(version) ]
+ {
+ # 64-bit compatibility warning deprecated since 9.0, see
+ # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ;
+ }
+ }
+
+ #
+ # Processor-specific optimization.
+ #
+
+ if [ MATCH ^([67]) : $(version) ]
+ {
+ # 8.0 deprecates some of the options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ;
+
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i386 : /G3 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ;
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ;
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+ }
+ else
+ {
+ # 8.0 and above adds some more options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ;
+
+ # 8.0 and above only has multi-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
+
+ # Specify target machine type so the linker will not need to guess.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : /MACHINE:X86 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : /MACHINE:IA64 ;
+
+ # Make sure that a manifest will be generated even if there are no
+ # dependencies to put in it.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/<embed-manifest>off : /MANIFEST ;
+ }
+ toolset.pop-checking-for-flags-module ;
+}
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+#
+rule register-toolset ( )
+{
+ if ! msvc in [ feature.values toolset ]
+ {
+ register-toolset-really ;
+ }
+}
+
+
+# Declare action for creating static libraries. If the library already exists, remove it
+# before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
+if [ os.name ] in NT
+{
+ # The 'DEL' command would issue a message to stdout if the file does not
+ # exist, so we need a check.
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+else
+{
+ actions archive
+ {
+ $(.RM) "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+
+
+# For the assembler the following options are turned on by default:
+#
+# -Zp4 align structures to 4 bytes
+# -Cp preserve case of user identifiers
+# -Cx preserve case in publics, externs
+#
+actions compile.asm
+{
+ $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
+}
+
+
+rule compile.c ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.preprocess ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.pch ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets[1]) : -TC ;
+ get-rspline $(targets[2]) : -TC ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+toolset.flags msvc YLOPTION : "-Yl" ;
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes in this action with intel-win
+#
+# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
+#
+# 2. When compiling an executable's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++.
+# The linker will pull these into the executable's PDB
+#
+# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
+# as in this case the compiler must be used to create a single PDB for our library.
+#
+actions compile-c-c++ bind PDB_NAME
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+actions preprocess-c-c++ bind PDB_NAME
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
+}
+
+rule compile-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<:S=.pdb) ;
+}
+
+rule preprocess-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<:S=.pdb) ;
+}
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+#
+# The global .escaped-double-quote variable is used to avoid messing up Emacs
+# syntax highlighting in the messy N-quoted code below.
+actions compile-c-c++-pch
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
+}
+
+
+# Action for running the C/C++ compiler using precompiled headers. An already
+# built source file for compiling the precompiled headers is expected to be
+# given as one of the source parameters.
+actions compile-c-c++-pch-s
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+
+rule compile.c++ ( targets + : sources * : properties * )
+{
+ get-rspline $(targets) : -TP ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+rule compile.c++.preprocess ( targets + : sources * : properties * )
+{
+ get-rspline $(targets) : -TP ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c++.pch ( targets + : sources * : properties * )
+{
+ get-rspline $(targets[1]) : -TP ;
+ get-rspline $(targets[2]) : -TP ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+
+# See midl.jam for details.
+#
+actions compile.idl
+{
+ $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
+ $(.TOUCH_FILE) "$(<[4]:W)"
+ $(.TOUCH_FILE) "$(<[5]:W)"
+}
+
+
+actions compile.mc
+{
+ $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
+
+
+actions compile.rc
+{
+ $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
+}
+
+
+rule link ( targets + : sources * : properties * )
+{
+ if <embed-manifest>on in $(properties)
+ {
+ msvc.manifest $(targets) : $(sources) : $(properties) ;
+ }
+}
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+ if <embed-manifest>on in $(properties)
+ {
+ msvc.manifest.dll $(targets) : $(sources) : $(properties) ;
+ }
+}
+
+# Incrementally linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# A Windows manifest is a way to specify dependencies on managed .NET
+# assemblies and native Windows DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if [ os.name ] in NT
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest
+ {
+ if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ )
+ }
+
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest.dll
+ {
+ if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ )
+ }
+}
+else
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1"
+ fi
+ }
+
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest.dll
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
+ fi
+ }
+}
+
+# This rule sets up the PDB file that will be used when generating static
+# libraries with the debug-store option set to 'database', so that the compiler
+# puts all debug info into a single .pdb file named after the library.
+#
+# Poking at source targets this way is probably not clean, but it is the
+# easiest approach.
+rule archive ( targets + : sources * : properties * )
+{
+ PDB_NAME on $(>) = $(<:S=.pdb) ;
+}
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class msvc-pch-generator : pch-generator
+{
+ import property-set ;
+
+ rule run-pch ( project name ? : property-set : sources * )
+ {
+ # Searching for the header and source file in the sources.
+ local pch-header ;
+ local pch-source ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ pch-header = $(s) ;
+ }
+ else if
+ [ type.is-derived [ $(s).type ] CPP ] ||
+ [ type.is-derived [ $(s).type ] C ]
+ {
+ pch-source = $(s) ;
+ }
+ }
+
+ if ! $(pch-header)
+ {
+ errors.user-error "can not build pch without pch-header" ;
+ }
+
+ # If we do not have the PCH source - that is fine. We will just create a
+ # temporary .cpp file in the action.
+
+ local generated = [ generator.run $(project) $(name)
+ : [ property-set.create
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ <pch-source>$(pch-source)
+ [ $(property-set).raw ] ]
+ : $(pch-header) ] ;
+
+ local pch-file ;
+ for local g in $(generated)
+ {
+ if [ type.is-derived [ $(g).type ] PCH ]
+ {
+ pch-file = $(g) ;
+ }
+ }
+
+ return [ property-set.create <pch-header>$(pch-header)
+ <pch-file>$(pch-file) ] $(generated) ;
+ }
+}
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects versions listed as '.known-versions' by checking registry information,
+# environment variables & default paths. Supports both native Windows and
+# Cygwin.
+#
+local rule auto-detect-toolset-versions ( )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ # Get installation paths from the registry.
+ for local i in $(.known-versions)
+ {
+ if $(.version-$(i)-reg)
+ {
+ local vc-path ;
+ for local x in "" "Wow6432Node\\"
+ {
+ vc-path += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
+ : "ProductDir" ] ;
+ }
+
+ if $(vc-path)
+ {
+ vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ;
+ register-configuration $(i) : [ path.native $(vc-path[1]) ] ;
+ }
+ }
+ }
+ }
+
+ # Check environment and default installation paths.
+ for local i in $(.known-versions)
+ {
+ if ! $(i) in [ $(.versions).all ]
+ {
+ register-configuration $(i) : [ default-path $(i) ] ;
+ }
+ }
+}
+
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+local rule configure-really ( version ? : options * )
+{
+ local v = $(version) ;
+
+ # Decide what the 'default' version is.
+ if ! $(v)
+ {
+ # Take the first registered (i.e. auto-detected) version.
+ version = [ $(.versions).all ] ;
+ version = $(version[1]) ;
+ v = $(version) ;
+
+ # Note: 'version' can still be empty at this point if no versions have
+ # been auto-detected.
+ version ?= "default" ;
+ }
+
+ # Version alias -> real version number.
+ if $(.version-alias-$(version))
+ {
+ version = $(.version-alias-$(version)) ;
+ }
+
+ # Check whether the selected configuration is already in use.
+ if $(version) in [ $(.versions).used ]
+ {
+ # Allow multiple 'toolset.using' calls for the same configuration if the
+ # identical sets of options are used.
+ if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
+ {
+ errors.error "MSVC toolset configuration: Toolset version"
+ "'$(version)' already configured." ;
+ }
+ }
+ else
+ {
+ # Register a new configuration.
+ $(.versions).register $(version) ;
+
+ # Add user-supplied options to the auto-detected ones.
+ options = [ $(.versions).get $(version) : options ] $(options) ;
+
+ # Mark the configuration as 'used'.
+ $(.versions).use $(version) ;
+
+ # Generate conditions and save them.
+ local conditions = [ common.check-init-parameters msvc : version $(v) ]
+ ;
+
+ $(.versions).set $(version) : conditions : $(conditions) ;
+
+ local command = [ feature.get-values <command> : $(options) ] ;
+
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
+ command = [ common.get-invocation-command msvc : cl.exe : $(command) :
+ [ default-paths $(version) ] : $(version) ] ;
+
+ common.handle-options msvc : $(conditions) : $(command) : $(options) ;
+
+ if ! $(version)
+ {
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
+ # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+ # 9.0express as 9.0 here.
+ if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
+ {
+ version = 10.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
+ {
+ version = 9.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
+ {
+ version = 8.0 ;
+ }
+ else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.1 ;
+ }
+ else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" :
+ $(command) ]
+ {
+ version = 7.1toolkit ;
+ }
+ else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.0 ;
+ }
+ else
+ {
+ version = 6.0 ;
+ }
+ }
+
+ # Generate and register setup command.
+
+ local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ;
+
+ local cpu = i386 amd64 ia64 ;
+ if $(below-8.0)
+ {
+ cpu = i386 ;
+ }
+
+ local setup-amd64 ;
+ local setup-i386 ;
+ local setup-ia64 ;
+
+ if $(command)
+ {
+ # TODO: Note that if we specify a non-existent toolset version then
+ # this rule may find and use a corresponding compiler executable
+ # belonging to an incorrect toolset version. For example, if you
+ # have only MSVC 7.1 installed, have its executable on the path and
+ # specify that you want Boost Build to use MSVC 9.0, then you want
+ # Boost Build to report an error, but this may cause it to silently use the
+ # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
+ # toolset version.
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+
+ if $(command)
+ {
+ local parent = [ path.make $(command) ] ;
+ parent = [ path.parent $(parent) ] ;
+ parent = [ path.native $(parent) ] ;
+
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+ # be used instead. Setup scripts may be global or arhitecture/
+ # be used instead. Setup scripts may be global or architecture/
+ # platform/cpu specific. Setup options are used only in the case of
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top level script but to
+ # support those we need to fall back to using the worker scripts
+ # directly in case the top level script can not be found.
+
+ local global-setup = [ feature.get-values <setup> : $(options) ] ;
+ global-setup = $(global-setup[1]) ;
+ if ! $(below-8.0)
+ {
+ global-setup ?= [ locate-default-setup $(command) : $(parent) :
+ vcvarsall.bat ] ;
+ }
+
+ local default-setup-amd64 = vcvarsx86_amd64.bat ;
+ local default-setup-i386 = vcvars32.bat ;
+ local default-setup-ia64 = vcvarsx86_ia64.bat ;
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, that seems to be a documentation bug
+ # and x86_ia64 is the correct option.
+ local default-global-setup-options-amd64 = x86_amd64 ;
+ local default-global-setup-options-i386 = x86 ;
+ local default-global-setup-options-ia64 = x86_ia64 ;
+
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+ # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+ # 'x86' when running 32-bit Windows, no matter which processor is
+ # used, and 'AMD64' when running 64-bit Windows on x86 hardware
+ # (either AMD64 or EM64T).
+ #
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
+ {
+ default-global-setup-options-amd64 = amd64 ;
+ }
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ else if $(somehow-detect-the-itanium-platform)
+ {
+ default-global-setup-options-ia64 = ia64 ;
+ }
+
+ local setup-prefix = "call " ;
+ local setup-suffix = " >nul"$(.nl) ;
+ if ! [ os.name ] in NT
+ {
+ setup-prefix = "cmd.exe /S /C call " ;
+ setup-suffix = " \">nul\" \"&&\" " ;
+ }
+
+ for local c in $(cpu)
+ {
+ local setup-options ;
+
+ setup-$(c) = [ feature.get-values <setup-$(c)> : $(options) ] ;
+
+ if ! $(setup-$(c))-is-not-empty
+ {
+ if $(global-setup)-is-not-empty
+ {
+ setup-$(c) = $(global-setup) ;
+
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ;
+
+ setup-options ?= $(default-global-setup-options-$(c)) ;
+ }
+ else
+ {
+ setup-$(c) = [ locate-default-setup $(command) : $(parent) : $(default-setup-$(c)) ] ;
+ }
+ }
+
+ # Cygwin to Windows path translation.
+ setup-$(c) = "\""$(setup-$(c):W)"\"" ;
+
+ # Append setup options to the setup name and add the final setup
+ # prefix & suffix.
+ setup-options ?= "" ;
+ setup-$(c) = $(setup-prefix)$(setup-$(c):J=" ")" "$(setup-options:J=" ")$(setup-suffix) ;
+ }
+ }
+
+ # Get tool names (if any) and finish setup.
+
+ compiler = [ feature.get-values <compiler> : $(options) ] ;
+ compiler ?= cl ;
+
+ linker = [ feature.get-values <linker> : $(options) ] ;
+ linker ?= link ;
+
+ resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
+ resource-compiler ?= rc ;
+
+ # Turn on some options for i386 assembler
+ # -coff generate COFF format object file (compatible with cl.exe output)
+ local default-assembler-amd64 = ml64 ;
+ local default-assembler-i386 = "ml -coff" ;
+ local default-assembler-ia64 = ias ;
+
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
+
+ idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
+ idl-compiler ?= midl ;
+
+ mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
+ mc-compiler ?= mc ;
+
+ manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
+ manifest-tool ?= mt ;
+
+ local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ;
+
+ for local c in $(cpu)
+ {
+ # Setup script is not required in some configurations.
+ setup-$(c) ?= "" ;
+
+ local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ;
+
+ if $(.debug-configuration)
+ {
+ for local cpu-condition in $(cpu-conditions)
+ {
+ ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ;
+ }
+ }
+
+ local cpu-assembler = $(assembler) ;
+ cpu-assembler ?= $(default-assembler-$(c)) ;
+
+ toolset.flags msvc.compile .CC $(cpu-conditions) : $(setup-$(c))$(compiler) /Zm800 -nologo ;
+ toolset.flags msvc.compile .RC $(cpu-conditions) : $(setup-$(c))$(resource-compiler) ;
+ toolset.flags msvc.compile .ASM $(cpu-conditions) : $(setup-$(c))$(cpu-assembler) -nologo ;
+ toolset.flags msvc.link .LD $(cpu-conditions) : $(setup-$(c))$(linker) /NOLOGO /INCREMENTAL:NO ;
+ toolset.flags msvc.archive .LD $(cpu-conditions) : $(setup-$(c))$(linker) /lib /NOLOGO ;
+ toolset.flags msvc.compile .IDL $(cpu-conditions) : $(setup-$(c))$(idl-compiler) ;
+ toolset.flags msvc.compile .MC $(cpu-conditions) : $(setup-$(c))$(mc-compiler) ;
+
+ toolset.flags msvc.link .MT $(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ;
+
+ if $(cc-filter)
+ {
+ toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ;
+ }
+ }
+
+ # Set version-specific flags.
+ configure-version-specific msvc : $(version) : $(conditions) ;
+ }
+}
+
+
+# Returns the default installation path for the given version.
+#
+local rule default-path ( version )
+{
+ # Use auto-detected path if possible.
+ local path = [ feature.get-values <command> : [ $(.versions).get $(version)
+ : options ] ] ;
+
+ if $(path)
+ {
+ path = $(path:D) ;
+ }
+ else
+ {
+ # Check environment.
+ if $(.version-$(version)-env)
+ {
+ local vc-path = [ os.environ $(.version-$(version)-env) ] ;
+ if $(vc-path)
+ {
+ vc-path = [ path.make $(vc-path) ] ;
+ vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ;
+ vc-path = [ path.native $(vc-path) ] ;
+
+ path = $(vc-path) ;
+ }
+ }
+
+ # Check default path.
+ if ! $(path) && $(.version-$(version)-path)
+ {
+ path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ;
+ }
+ }
+
+ return $(path) ;
+}
+
+
+# Returns either the default installation path (if 'version' is not empty) or
+# a list of all known default paths (if no version is given).
+#
+local rule default-paths ( version ? )
+{
+ local possible-paths ;
+
+ if $(version)
+ {
+ possible-paths += [ default-path $(version) ] ;
+ }
+ else
+ {
+ for local i in $(.known-versions)
+ {
+ possible-paths += [ default-path $(i) ] ;
+ }
+ }
+
+ return $(possible-paths) ;
+}
+
+
+rule get-rspline ( target : lang-opt )
+{
+ CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS)
+ $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES)
+ $(.nl)\"-I$(INCLUDES:W)\" ] ;
+}
+
+class msvc-linking-generator : linking-generator
+{
+ # Calls the base version. If necessary, also creates a target for the
+ # manifest file and, when debug symbols are enabled, a PDB target named
+ # after the main target.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local result = [ linking-generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+
+ if $(result)
+ {
+ local name-main = [ $(result[0]).name ] ;
+ local action = [ $(result[0]).action ] ;
+
+ if [ $(property-set).get <debug-symbols> ] = "on"
+ {
+ # We force exact name on PDB. The reason is tagging -- the tag rule may
+ # reasonably special case some target types, like SHARED_LIB. The tag rule
+ # will not catch PDB, and it cannot easily figure out whether a PDB is paired
+ # with a SHARED_LIB, an EXE or something else. Because the PDB always gets the
+ # same name as the main target, with .pdb as the extension, we just force it.
+ local target = [ class.new file-target $(name-main:S=.pdb) exact : PDB : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ] ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+
+ if [ $(property-set).get <embed-manifest> ] = "off"
+ {
+ # The manifest is an awkward target: it has .manifest appended to the name
+ # of the main target, including the extension, e.g. a.exe.manifest. We use
+ # an 'exact' name to achieve this effect.
+ local target = [ class.new file-target $(name-main).manifest exact : MANIFEST : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ] ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+#
+local rule register-toolset-really ( )
+{
+ feature.extend toolset : msvc ;
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature toolset msvc : vendor : intel : propagated optional ;
+
+ # Inherit MIDL flags.
+ toolset.inherit-flags msvc : midl ;
+
+ # Inherit MC flags.
+ toolset.inherit-flags msvc : mc ;
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add-requirements
+ <toolset>msvc,<runtime-link>shared:<threading>multi ;
+
+ # Declare msvc toolset specific features.
+ {
+ feature.feature debug-store : object database : propagated ;
+ feature.feature pch-source : : dependency free ;
+ }
+
+ # Declare generators.
+ {
+ # TODO: Is it possible to combine these? Make the generators
+ # non-composing so that they do not convert each source into a separate
+ # .rsp file.
+ generators.register [ new msvc-linking-generator
+ msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ;
+ generators.register [ new msvc-linking-generator
+ msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ] ;
+
+ generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ;
+
+ # Using 'register-c-compiler' adds the build directory to INCLUDES.
+ generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ;
+ generators.override msvc.compile.rc : rc.compile.resource ;
+ generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ;
+
+ generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ;
+ generators.override msvc.compile.idl : midl.compile.idl ;
+
+ generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ;
+ generators.override msvc.compile.mc : mc.compile ;
+
+ # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
+ # the latter have their HPP type derived from H. The type of compilation
+ # is determined entirely by the destination type.
+ generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
+ generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
+
+ generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
+ generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
+ }
+
+ toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
+ toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
+ toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
+
+ #
+ # Declare flags for compilation.
+ #
+
+ toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ;
+ toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ;
+
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ;
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ;
+
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ;
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ;
+ toolset.flags msvc.compile CFLAGS <optimization>off : /Od ;
+ toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ;
+ toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ;
+ toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ;
+
+ toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
+
+ # By default 8.0 enables rtti support while prior versions disabled it. We
+ # simply enable or disable it explicitly so we do not have to depend on this
+ # default behaviour.
+ toolset.flags msvc.compile CFLAGS <rtti>on : /GR ;
+ toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+ toolset.flags msvc.compile OPTIONS <cflags> : ;
+ toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ;
+
+ toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ;
+
+ toolset.flags msvc.compile DEFINES <define> ;
+ toolset.flags msvc.compile UNDEFS <undef> ;
+ toolset.flags msvc.compile INCLUDES <include> ;
+
+ # Declare flags for the assembler.
+ toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile.asm DEFINES <define> ;
+
+ # Declare flags for linking.
+ {
+ toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
+ toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
+ toolset.flags msvc.link DEF_FILE <def-file> ;
+
+ # The linker disables the default optimizations when using /DEBUG so we
+ # have to enable them manually for release builds with debug symbols.
+ toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ;
+
+ toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ;
+ toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ;
+ toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ;
+ toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ;
+ toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ;
+
+ toolset.flags msvc.link OPTIONS <linkflags> ;
+ toolset.flags msvc.link LINKPATH <library-path> ;
+
+ toolset.flags msvc.link FINDLIBS_ST <find-static-library> ;
+ toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ;
+ toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
+ toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+ }
+
+ toolset.flags msvc.archive AROPTIONS <archiveflags> ;
+}
+
+
+# Locates the requested setup script under the given folder and returns its full
+# path or nothing in case the script can not be found. In case multiple scripts
+# are found only the first one is returned.
+#
+# TODO: There used to exist a code comment for the msvc.init rule stating that
+# we do not correctly detect the location of the vcvars32.bat setup script for
+# the free VC7.1 tools in case the user explicitly provides a path. This should
+# be tested, or this whole comment should simply be removed if this toolset
+# version is no longer important.
+#
+local rule locate-default-setup ( command : parent : setup-name )
+{
+ local result = [ GLOB $(command) $(parent) : $(setup-name) ] ;
+ if $(result[1])
+ {
+ return $(result[1]) ;
+ }
+}
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+local rule register-configuration ( version : path ? )
+{
+ if $(path)
+ {
+ local command = [ GLOB $(path) : cl.exe ] ;
+
+ if $(command)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [msvc-cfg] msvc-$(version) detected, command: '$(command)'" ;
+ }
+
+ $(.versions).register $(version) ;
+ $(.versions).set $(version) : options : <command>$(command) ;
+ }
+ }
+}
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Miscellaneous constants.
+.RM = [ common.rm-command ] ;
+.nl = "
+" ;
+.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ;
+.escaped-double-quote = "\"" ;
+.TOUCH_FILE = [ common.file-touch-command ] ;
+
+# List of all registered configurations.
+.versions = [ new configurations ] ;
+
+# Supported CPU architectures.
+.cpu-arch-i386 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-amd64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-ia64 =
+ <architecture>ia64/<address-model>
+ <architecture>ia64/<address-model>64 ;
+
+
+# Supported CPU types (only Itanium optimization options are supported from
+# VC++ 2005 on). See
+# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
+# detailed information.
+.cpu-type-g5 = i586 pentium pentium-mmx ;
+.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6
+ k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ;
+.cpu-type-em64t = prescott nocona conroe conroe-xe conroe-l allendale mermon
+ mermon-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx ;
+.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+.cpu-type-itanium = itanium itanium1 merced ;
+.cpu-type-itanium2 = itanium2 mckinley ;
+
+
+# Known toolset versions, in order of preference.
+.known-versions = 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ;
+
+# Version aliases.
+.version-alias-6 = 6.0 ;
+.version-alias-6.5 = 6.0 ;
+.version-alias-7 = 7.0 ;
+.version-alias-8 = 8.0 ;
+.version-alias-9 = 9.0 ;
+.version-alias-10 = 10.0 ;
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
+.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ;
+.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ;
+.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ;
+.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ;
+.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ;
+.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
+.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
+.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
+
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ;
+.version-7.1toolkit-env = VCToolkitInstallDir ;
+
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
+.version-7.1toolkit-envpath = "bin" ;
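+
+# Example (illustrative only; the command path below is hypothetical and
+# depends on the local installation): instead of relying on the auto-detection
+# below, a configuration may also be registered explicitly from
+# user-config.jam, e.g.
+#
+#   using msvc : 7.1toolkit ;
+#   using msvc : 9.0 : "C:/Program Files/Microsoft Visual Studio 9.0/VC/bin/cl.exe" ;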
+
+
+# Auto-detect all the available msvc installations on the system.
+auto-detect-toolset-versions ;
+
+
+# And finally trigger the actual Boost Build toolset registration.
+register-toolset ;
diff --git a/jam-files/boost-build/tools/notfile.jam b/jam-files/boost-build/tools/notfile.jam
new file mode 100644
index 000000000..97a5b0e87
--- /dev/null
+++ b/jam-files/boost-build/tools/notfile.jam
@@ -0,0 +1,74 @@
+# Copyright (c) 2005 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+import project ;
+import targets ;
+import toolset ;
+import type ;
+
+
+type.register NOTFILE_MAIN ;
+
+
+class notfile-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local action ;
+ local action-name = [ $(property-set).get <action> ] ;
+
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+
+ if $(m)
+ {
+ action = [ new action $(sources) : $(m[1])
+ : $(property-set) ] ;
+ }
+ else
+ {
+ action = [ new action $(sources) : notfile.run
+ : $(property-set) ] ;
+ }
+ return [ virtual-target.register
+ [ new notfile-target $(name) : $(project) : $(action) ] ] ;
+ }
+}
+
+
+generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
+
+
+toolset.flags notfile.run ACTION : <action> ;
+
+
+actions run
+{
+ $(ACTION)
+}
+
+
+rule notfile ( target-name : action + : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ requirements += <action>$(action) ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : NOTFILE_MAIN
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
+
+IMPORT $(__name__) : notfile : : notfile ;
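+
+# Example usage (an illustrative sketch): in a Jamfile, a target whose "build"
+# simply runs a command can be declared either with an inline shell command or
+# with a named action:
+#
+#   notfile say-hello : "echo hello" ;
+#
+#   notfile run-tool : @invoke-tool ;
+#   actions invoke-tool { echo running $(<) }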
diff --git a/jam-files/boost-build/tools/package.jam b/jam-files/boost-build/tools/package.jam
new file mode 100644
index 000000000..198c22315
--- /dev/null
+++ b/jam-files/boost-build/tools/package.jam
@@ -0,0 +1,165 @@
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides mechanism for installing whole packages into a specific directory
+# structure. This is opposed to the 'install' rule, that installs a number of
+# targets to a single directory, and does not care about directory structure at
+# all.
+
+# Example usage:
+#
+# package.install boost : <properties>
+# : <binaries>
+# : <libraries>
+# : <headers>
+# ;
+#
+# This will install binaries, libraries and headers to the 'proper' location,
+# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
+# --includedir.
+#
+# The rule is just a convenient wrapper, avoiding the need to define several
+# 'install' targets.
+#
+# The only install-related feature is <install-source-root>. It applies to
+# headers only; if present, the paths of headers relative to the source root
+# are retained after installation. If it is not specified, "." is assumed, so
+# relative paths in headers are always preserved.
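+#
+# For example (an illustrative sketch; "include" names a hypothetical source
+# directory holding mylib's headers):
+#
+# package.install mylib : <install-source-root>include
+#     : # no binaries
+#     : mylib
+#     : [ glob include/mylib/*.hpp ]
+#     ;
+#
+# would install include/mylib/foo.hpp as <includedir>/mylib/foo.hpp.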
+
+import "class" : new ;
+import option ;
+import project ;
+import feature ;
+import property ;
+import stage ;
+import targets ;
+import modules ;
+
+feature.feature install-default-prefix : : free incidental ;
+
+rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * )
+{
+ package-name ?= $(name) ;
+ if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
+ {
+ # If --prefix is explicitly specified on the command line,
+ # then we need to wipe away any settings of libdir/includedir that
+ # are specified via options in config files.
+ option.set bindir : ;
+ option.set libdir : ;
+ option.set includedir : ;
+ }
+
+ # If <install-source-root> is not specified, all headers are installed to
+ # prefix/include, no matter what their relative path is. Sometimes that is
+ # what is needed.
+ local install-source-root = [ property.select <install-source-root> :
+ $(requirements) ] ;
+ install-source-root = $(install-source-root:G=) ;
+ requirements = [ property.change $(requirements) : <install-source-root> ] ;
+
+ local install-header-subdir = [ property.select <install-header-subdir> :
+ $(requirements) ] ;
+ install-header-subdir = /$(install-header-subdir:G=) ;
+ install-header-subdir ?= "" ;
+ requirements = [ property.change $(requirements) : <install-header-subdir> ]
+ ;
+
+ # First, figure out all locations. Use the default if no prefix option
+ # given.
+ local prefix = [ get-prefix $(name) : $(requirements) ] ;
+
+ # Architecture dependent files.
+ local exec-locate = [ option.get exec-prefix : $(prefix) ] ;
+
+ # Binaries.
+ local bin-locate = [ option.get bindir : $(prefix)/bin ] ;
+
+ # Object code libraries.
+ local lib-locate = [ option.get libdir : $(prefix)/lib ] ;
+
+ # Source header files.
+ local include-locate = [ option.get includedir : $(prefix)/include ] ;
+
+ stage.install $(name)-bin : $(binaries) : $(requirements)
+ <location>$(bin-locate) ;
+ alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ;
+
+ # Since the install location of shared libraries differs on universe
+ # and cygwin, use target alternatives to make different targets.
+ # We should have used indirect conditional requirements, but it is
+ # awkward to pass bin-locate and lib-locate from there to another rule.
+ alias $(name)-lib-shared : $(name)-lib-shared-universe ;
+ alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ;
+
+ # For shared libraries, we install both explicitly specified one and the
+ # shared libraries that the installed executables depend on.
+ stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements)
+ <location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ;
+ stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements)
+ <location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ;
+
+ # For static libraries, we do not care about executable dependencies, since
+ # static libraries are already incorporated into them.
+ stage.install $(name)-lib-static : $(libraries) : $(requirements)
+ <location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ;
+ stage.install $(name)-headers : $(headers) : $(requirements)
+ <location>$(include-locate)$(install-header-subdir)
+ <install-source-root>$(install-source-root) ;
+ alias $(name) : $(name)-bin $(name)-lib $(name)-headers ;
+
+ local c = [ project.current ] ;
+ local project-module = [ $(c).project-module ] ;
+ module $(project-module)
+ {
+ explicit $(1)-bin $(1)-lib $(1)-headers $(1) $(1)-lib-shared $(1)-lib-static
+ $(1)-lib-shared-universe $(1)-lib-shared-cygwin ;
+ }
+}
+
+rule install-data ( target-name : package-name : data * : requirements * )
+{
+ package-name ?= target-name ;
+ if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
+ {
+ # If --prefix is explicitly specified on the command line,
+ # then we need to wipe away any settings of datarootdir.
+ option.set datarootdir : ;
+ }
+
+ local prefix = [ get-prefix $(package-name) : $(requirements) ] ;
+ local datadir = [ option.get datarootdir : $(prefix)/share ] ;
+
+ stage.install $(target-name)
+ : $(data)
+ : $(requirements) <location>$(datadir)/$(package-name)
+ ;
+
+ local c = [ project.current ] ;
+ local project-module = [ $(c).project-module ] ;
+ module $(project-module)
+ {
+ explicit $(1) ;
+ }
+}
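+
+# Example (an illustrative sketch; the data files named are hypothetical):
+# install documentation under <datarootdir>/mypackage:
+#
+#   package.install-data mypackage-doc : mypackage : doc/manual.html doc/faq.html ;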
+
+local rule get-prefix ( package-name : requirements * )
+{
+ local prefix = [ option.get prefix : [ property.select
+ <install-default-prefix> : $(requirements) ] ] ;
+ prefix = $(prefix:G=) ;
+ requirements = [ property.change $(requirements) : <install-default-prefix>
+ ] ;
+ # Or some likely defaults if neither is given.
+ if ! $(prefix)
+ {
+ if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; }
+ else if [ modules.peek : UNIX ] { prefix = /usr/local ; }
+ }
+ return $(prefix) ;
+}
+
diff --git a/jam-files/boost-build/tools/pathscale.jam b/jam-files/boost-build/tools/pathscale.jam
new file mode 100644
index 000000000..454e34547
--- /dev/null
+++ b/jam-files/boost-build/tools/pathscale.jam
@@ -0,0 +1,168 @@
+# Copyright 2006 Noel Belcourt
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import toolset : flags ;
+import feature ;
+import type ;
+import common ;
+import fortran ;
+
+feature.extend toolset : pathscale ;
+toolset.inherit pathscale : unix ;
+generators.override pathscale.prebuilt : builtin.prebuilt ;
+generators.override pathscale.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description are located at
+# http://www.pathscale.com/docs.html
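+#
+# Example (illustrative; the command path is hypothetical):
+#
+#   using pathscale : : /opt/ekopath/bin/pathCC ;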
+
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command pathscale : pathCC : $(command)
+ : /opt/ekopath/bin ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters pathscale
+ : version $(version) ] ;
+
+ common.handle-options pathscale : $(condition) : $(command) : $(options) ;
+
+ toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
+
+ toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
+ local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+ local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+
+ toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
+ toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pathscale.link FINDLIBS-SA : rt : unchecked ;
+}
+
+# Declare generators
+generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
+generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
+generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
+generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
+
+# Declare flags and actions for compilation
+flags pathscale.compile OPTIONS <optimization>off : -O0 ;
+flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
+flags pathscale.compile OPTIONS <optimization>space : -Os ;
+
+flags pathscale.compile OPTIONS <inlining>off : -noinline ;
+flags pathscale.compile OPTIONS <inlining>on : -inline ;
+flags pathscale.compile OPTIONS <inlining>full : -inline ;
+
+flags pathscale.compile OPTIONS <warnings>off : -woffall ;
+flags pathscale.compile OPTIONS <warnings>on : -Wall ;
+flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
+flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
+flags pathscale.compile OPTIONS <profiling>on : -pg ;
+flags pathscale.compile OPTIONS <link>shared : -fPIC ;
+flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
+flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
+
+flags pathscale.compile USER_OPTIONS <cflags> ;
+flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
+flags pathscale.compile DEFINES <define> ;
+flags pathscale.compile INCLUDES <include> ;
+
+flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
+flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.fortran90 ( targets * : sources * : properties * )
+{
+ # Setting SPACE inserts spaces between targets; it is necessary.
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the compile.fortran90 action. F90 sources must be
+ # compiled in a particular order, so we serialize the build because a
+ # parallel F90 compile might fail.
+ JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
+}
+
+actions compile.fortran90
+{
+ "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
+# Strip the binary when no debugging is needed
+flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
+flags pathscale.link OPTIONS <profiling>on : -pg ;
+flags pathscale.link USER_OPTIONS <linkflags> ;
+flags pathscale.link LINKPATH <library-path> ;
+flags pathscale.link FINDLIBS-ST <find-static-library> ;
+flags pathscale.link FINDLIBS-SA <find-shared-library> ;
+flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
+flags pathscale.link LIBRARIES <library-file> ;
+flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
+flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pathscale.link RPATH <dll-path> ;
+# On gcc, there are separate options for dll path at runtime and
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it's bad idea.
+flags pathscale.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Declare action for creating static libraries
+# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
+actions piecemeal archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/pch.jam b/jam-files/boost-build/tools/pch.jam
new file mode 100644
index 000000000..0c6e98fac
--- /dev/null
+++ b/jam-files/boost-build/tools/pch.jam
@@ -0,0 +1,95 @@
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##### Using Precompiled Headers (Quick Guide) #####
+#
+# Make precompiled mypch.hpp:
+#
+# import pch ;
+#
+# cpp-pch mypch
+# : # sources
+# mypch.hpp
+#   : # requirements
+# <toolset>msvc:<source>mypch.cpp
+# ;
+#
+# Add cpp-pch to sources:
+#
+# exe hello
+# : main.cpp hello.cpp mypch
+# ;
+
+import "class" : new ;
+import type ;
+import feature ;
+import generators ;
+
+type.register PCH : pch ;
+
+type.register C_PCH : : PCH ;
+type.register CPP_PCH : : PCH ;
+
+# Control precompiled header (PCH) generation.
+feature.feature pch :
+ on
+ off
+ : propagated ;
+
+
+feature.feature pch-header : : free dependency ;
+feature.feature pch-file : : free dependency ;
+
+# Base PCH generator. The 'run' method has the logic to prevent this generator
+# from being run unless it's being used for a top-level PCH target.
+class pch-generator : generator
+{
+ import property-set ;
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ if ! $(name)
+ {
+ # Unless this generator is invoked as the top-most generator for a
+ # main target, fail. This allows using 'H' type as input type for
+ # this generator, while preventing Boost.Build from trying this generator
+ # when not explicitly asked for.
+ #
+ # One bad example is msvc, where the pch generator produces both a PCH
+ # target and an OBJ target, so if any header is generated (e.g. by
+ # bison or by msidl), we would try to use the pch generator to get an
+ # OBJ from that H, which is completely wrong. By restricting this
+ # generator to pch main targets only, this problem is avoided.
+ }
+ else
+ {
+ local r = [ run-pch $(project) $(name)
+ : [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ]
+ : $(sources) ] ;
+ return [ generators.add-usage-requirements $(r)
+ : <define>BOOST_BUILD_PCH_ENABLED ] ;
+ }
+ }
+
+ # This rule must be overridden by the derived classes.
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ }
+}
+
+
+# NOTE: requirements are empty, default pch generator can be applied when
+# pch=off.
+generators.register
+ [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
+generators.register
+ [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
diff --git a/jam-files/boost-build/tools/pgi.jam b/jam-files/boost-build/tools/pgi.jam
new file mode 100644
index 000000000..3a35c6447
--- /dev/null
+++ b/jam-files/boost-build/tools/pgi.jam
@@ -0,0 +1,147 @@
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+import gcc ;
+
+feature.extend toolset : pgi ;
+toolset.inherit pgi : unix ;
+generators.override pgi.prebuilt : builtin.lib-generator ;
+generators.override pgi.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description are located at
+# http://www.pgroup.com/resources/docs.htm
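+#
+# Example (illustrative; the command path is hypothetical):
+#
+#   using pgi : : /opt/pgi/linux86/bin/pgCC ;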
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters pgi : version $(version) ] ;
+
+ local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ;
+
+ common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ;
+
+ toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ flags pgi.compile DEFINES $(condition) :
+ [ feature.get-values <define> : $(options) ] : unchecked ;
+
+ # IOV_MAX support
+ flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ;
+
+ # set link flags
+ flags pgi.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pgi.link FINDLIBS-SA : rt [
+ feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+
+ gcc.init-link-flags pgi gnu $(condition) ;
+}
+
+# Declare generators
+generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
+generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
+generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
+
+# Declare flags and actions for compilation
+flags pgi.compile OPTIONS : -Kieee ;
+flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
+flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
+flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
+flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ;
+# flags pgi.compile OPTIONS <threading>multi : -mt ;
+
+flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
+flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
+
+flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
+
+flags pgi.compile OPTIONS <cflags> ;
+flags pgi.compile.c++ OPTIONS <cxxflags> ;
+flags pgi.compile DEFINES <define> ;
+flags pgi.compile INCLUDES <include> ;
+
+flags pgi.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
+# Strip the binary when no debugging is needed
+flags pgi.link OPTIONS <debug-symbols>off : -s ;
+flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
+flags pgi.link OPTIONS <linkflags> ;
+flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
+flags pgi.link LINKPATH <library-path> ;
+flags pgi.link FINDLIBS-ST <find-static-library> ;
+flags pgi.link FINDLIBS-SA <find-shared-library> ;
+flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
+flags pgi.link LIBRARIES <library-file> ;
+flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
+flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pgi.link RPATH <dll-path> ;
+
+# On gcc, there are separate options for dll path at runtime and
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it is a bad idea.
+flags pgi.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+# reddish can only link statically and, somehow, the presence of -Bdynamic on the link line
+# marks the executable as a dynamically linked exec even though no dynamic libraries are supplied.
+# Yod on redstorm refuses to load an executable that is dynamically linked.
+# removing the dynamic link options should get us where we need to be on redstorm.
+# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+actions updated together piecemeal pgi.archive
+{
+ ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
+}
+
diff --git a/jam-files/boost-build/tools/python-config.jam b/jam-files/boost-build/tools/python-config.jam
new file mode 100644
index 000000000..40aa825bc
--- /dev/null
+++ b/jam-files/boost-build/tools/python-config.jam
@@ -0,0 +1,27 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for Python tools and libraries. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ for local R in 2.4 2.3 2.2
+ {
+ local python-path = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ;
+ local python-version = $(R) ;
+
+ if $(python-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ;
+ }
+ using python : $(python-version) : $(python-path) ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/tools/python.jam b/jam-files/boost-build/tools/python.jam
new file mode 100644
index 000000000..66f2aabec
--- /dev/null
+++ b/jam-files/boost-build/tools/python.jam
@@ -0,0 +1,1267 @@
+# Copyright 2004 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Python and the Boost.Python library.
+#
+# This module defines
+#
+# - a project 'python' with a target 'python' in it, that corresponds to the
+# python library
+#
+# - a main target rule 'python-extension' which can be used to build a python
+# extension.
+#
+# Extensions that use Boost.Python must explicitly link to it.
+
+import type ;
+import testing ;
+import generators ;
+import project ;
+import errors ;
+import targets ;
+import "class" : new ;
+import os ;
+import common ;
+import toolset ;
+import regex ;
+import numbers ;
+import string ;
+import property ;
+import sequence ;
+import path ;
+import feature ;
+import set ;
+import builtin ;
+import version ;
+
+
+# Make this module a project.
+project.initialize $(__name__) ;
+project python ;
+
+# Save the project so that if 'init' is called several times we define new
+# targets in the python project, not in whatever project we were called by.
+.project = [ project.current ] ;
+
+# Dynamic linker lib. Necessary to specify it explicitly on some platforms.
+lib dl ;
+# This contains the 'openpty' function needed by Python. Again, on some systems
+# it needs to be passed to the linker explicitly.
+lib util ;
+# Python uses pthread symbols.
+lib pthread ;
+# Extra library needed by pthread on some platforms.
+lib rt ;
+
+# The pythonpath feature specifies additional elements for the PYTHONPATH
+# environment variable, set by run-pyd. For example, pythonpath can be used to
+# access Python modules that are part of the product being built, but are not
+# installed in the development system's default paths.
+feature.feature pythonpath : : free optional path ;
+
+# Initializes the Python toolset. Note that all parameters are optional.
+#
+# - version -- the version of Python to use. Should be in Major.Minor format,
+# for example 2.3. Do not include the subminor version.
+#
+# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter.
+# Alternatively, the installation prefix for Python libraries and includes. If
+# empty, will be guessed from the version, the platform's installation
+# patterns, and the python executables that can be found in PATH.
+#
+# - includes: the include path to Python headers. If empty, will be guessed.
+#
+# - libraries: the path to Python library binaries. If empty, will be guessed.
+# On MacOS/Darwin, you can also pass the path of the Python framework.
+#
+# - condition: if specified, should be a set of properties that are matched
+# against the build configuration when Boost.Build selects a Python
+# configuration to use.
+#
+# - extension-suffix: A string to append to the name of extension modules before
+# the true filename extension. Ordinarily we would just compute this based on
+# the value of the <python-debugging> feature. However ubuntu's python-dbg
+# package uses the windows convention of appending _d to debug-build extension
+# modules. We have no way of detecting ubuntu, or of probing python for the
+# "_d" requirement, and if you configure and build python using
+# --with-pydebug, you'll be using the standard *nix convention. Defaults to ""
+# (or "_d" when targeting windows and <python-debugging> is set).
+#
+# Example usage:
+#
+# using python : 2.3 ;
+# using python : 2.3 : /usr/local/bin/python ;
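+#
+#   # A fuller form giving an explicit interpreter command, include path and
+#   # library path (the paths here are hypothetical and system-dependent):
+#   using python : 2.6 : /usr/bin/python2.6 : /usr/include/python2.6 : /usr/lib ;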
+#
+rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
+ : condition * : extension-suffix ? )
+{
+ project.push-current $(.project) ;
+
+ debug-message Configuring python... ;
+ for local v in version cmd-or-prefix includes libraries condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): \"$($(v))\" ;
+ }
+ }
+
+ configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
+
+ project.pop-current ;
+}
+
+# A simpler version of SHELL that grabs stderr as well as stdout, but returns
+# nothing if there was an error.
+#
+local rule shell-cmd ( cmd )
+{
+ debug-message running command '$(cmd)" 2>&1"' ;
+ x = [ SHELL $(cmd)" 2>&1" : exit-status ] ;
+ if $(x[2]) = 0
+ {
+ return $(x[1]) ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Try to identify Cygwin symlinks. Invoking such a file directly as an NT
+# executable from a native Windows build of bjam would be fatal to the bjam
+# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove
+# that those are not also symlinks. ;-)
+#
+# If a symlink is found returns non-empty; we try to extract the target of the
+# symlink from the file and return that.
+#
+# Note: 1. only works on NT 2. path is a native path.
+local rule is-cygwin-symlink ( path )
+{
+ local is-symlink = ;
+
+ # Look for a file with the given path having the S attribute set, as cygwin
+ # symlinks do. /-C means "do not use thousands separators in file sizes."
+ local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ;
+
+ if $(dir-listing)
+ {
+ # Escape any special regex characters in the base part of the path.
+ local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ;
+
+ # Extract the file's size from the directory listing.
+ local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ;
+
+ # If the file has a reasonably small size, look for the special symlink
+ # identification text.
+ if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ]
+ {
+ local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ;
+ if $(link[2]) != 0
+ {
+ local nl = "
+
+" ;
+ is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ;
+ if $(is-symlink)
+ {
+ is-symlink = [ *nix-path-to-native $(is-symlink) ] ;
+ is-symlink = $(is-symlink:R=$(path:D)) ;
+ }
+
+ }
+ }
+ }
+ return $(is-symlink) ;
+}
+
+
+# Append ext to each member of names that does not contain '.'.
+#
+local rule default-extension ( names * : ext * )
+{
+ local result ;
+ for local n in $(names)
+ {
+ switch $(n)
+ {
+ case *.* : result += $(n) ;
+ case * : result += $(n)$(ext) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Tries to determine whether invoking "cmd" would actually attempt to launch a
+# cygwin symlink.
+#
+# Note: only works on NT.
+#
+local rule invokes-cygwin-symlink ( cmd )
+{
+ local dirs = $(cmd:D) ;
+ if ! $(dirs)
+ {
+ dirs = . [ os.executable-path ] ;
+ }
+ local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ;
+ local paths = [ GLOB $(dirs) : $(base) ] ;
+ if $(paths)
+ {
+ # Make sure we have not run into a Cygwin symlink. Invoking such a file
+ # as an NT executable would be fatal for the bjam process.
+ return [ is-cygwin-symlink $(paths[1]) ] ;
+ }
+}
+
+
+local rule debug-message ( message * )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO notice: [python-cfg] $(message) ;
+ }
+}
+
+
+# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and
+# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result
+# found. Also accounts for the fact that on 64-bit machines, 32-bit software has
+# its own area, under SOFTWARE\Wow6432node.
+#
+local rule software-registry-value ( path : data ? )
+{
+ local result ;
+ for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE
+ {
+ for local x64elt in "" Wow6432node\\ # Account for 64-bit windows
+ {
+ if ! $(result)
+ {
+ result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ;
+ }
+ }
+
+ }
+ return $(result) ;
+}
+
+
+.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ;
+.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ;
+
+.working-directory = [ PWD ] ;
+.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ;
+.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ;
+
+
+local rule windows-to-cygwin-path ( path )
+{
+ # If path is rooted with a drive letter, rewrite it using the /cygdrive
+ # mountpoint.
+ local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ;
+
+ # Else if path is rooted without a drive letter, use the working directory.
+ p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ;
+
+ # Else return the path unchanged.
+ return $(p:E=$(path:T)) ;
+}
+
+
+# :W only works in Cygwin builds of bjam. This one works on NT builds as well.
+#
+local rule cygwin-to-windows-path ( path )
+{
+ path = $(path:R="") ; # strip any trailing slash
+
+ local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ;
+ if $(drive-letter)
+ {
+ path = $(drive-letter) ;
+ }
+ else if $(path:R=/x) = $(path) # already rooted?
+ {
+ # Look for a cygwin mount that includes each head sequence in $(path).
+ local head = $(path) ;
+ local tail = "" ;
+
+ while $(head)
+ {
+ local root = [ software-registry-value
+ "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ;
+
+ if $(root)
+ {
+ path = $(tail:R=$(root)) ;
+ head = ;
+ }
+ tail = $(tail:R=$(head:D=)) ;
+
+ if $(head) = /
+ {
+ head = ;
+ }
+ else
+ {
+ head = $(head:D) ;
+ }
+ }
+ }
+ return [ regex.replace $(path:R="") / \\ ] ;
+}
+
+
+# Convert a *nix path to native.
+#
+local rule *nix-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ path = [ cygwin-to-windows-path $(path) ] ;
+ }
+ return $(path) ;
+}
+
+
+# Convert an NT path to native.
+#
+local rule windows-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ windows-to-cygwin-path $(path) ] ;
+ }
+}
+
+
+# Return nonempty if path looks like a windows path, i.e. it starts with a drive
+# letter or contains backslashes.
+#
+local rule guess-windows-path ( path )
+{
+ return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ;
+}
+
+
+local rule path-to-native ( paths * )
+{
+ local result ;
+
+ for local p in $(paths)
+ {
+ if [ guess-windows-path $(p) ]
+ {
+ result += [ windows-path-to-native $(p) ] ;
+ }
+ else
+ {
+ result += [ *nix-path-to-native $(p:T) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+
+# Build a list of versions from 3.0 down to 1.5. Because bjam can not enumerate
+# registry sub-keys, we have no way of finding a version with a 2-digit minor
+# version, e.g. 2.10 -- let us hope that never happens.
+#
+.version-countdown = ;
+for local v in [ numbers.range 15 30 ]
+{
+ .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ;
+}
+
+
+local rule windows-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+ local interpreters ;
+
+ for local v in $(version)
+ {
+ local install-path = [
+ software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ;
+
+ if $(install-path)
+ {
+ install-path = [ windows-path-to-native $(install-path) ] ;
+ debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ;
+ }
+
+ interpreters += $(:E=python:R=$(install-path)) ;
+ }
+ return $(interpreters) ;
+}
+
+
+local rule darwin-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+
+ local prefix
+ = [ GLOB /System/Library/Frameworks /Library/Frameworks
+ : Python.framework ] ;
+
+ return $(prefix)/Versions/$(version)/bin/python ;
+}
+
+
+# Assume "python-cmd" invokes a python interpreter and invoke it to extract all
+# the information we care about from its "sys" module. Returns void if
+# unsuccessful.
+#
+local rule probe ( python-cmd )
+{
+ # Avoid invoking a Cygwin symlink on NT.
+ local skip-symlink ;
+ if [ os.name ] = NT
+ {
+ skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ;
+ }
+
+ if $(skip-symlink)
+ {
+ debug-message -------------------------------------------------------------------- ;
+ debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ;
+ debug-message causing a bjam built for Windows to hang. ;
+ debug-message ;
+ debug-message If you intend to target a Cygwin build of Python, please ;
+ debug-message replace the path to the link with the path to a real executable ;
+ debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ;
+ debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ;
+ debug-message backslashes ;
+ debug-message -------------------------------------------------------------------- ;
+ }
+ else
+ {
+ # Prepare a List of Python format strings and expressions that can be
+ # used to print the constants we want from the sys module.
+
+ # We do not really want sys.version since that is a complicated string,
+ # so get the information from sys.version_info instead.
+ local format = "version=%d.%d" ;
+ local exprs = "version_info[0]" "version_info[1]" ;
+
+ for local s in $(sys-elements[2-])
+ {
+ format += $(s)=%s ;
+ exprs += $(s) ;
+ }
+
+ # Invoke Python and ask it for all those values.
+ if [ version.check-jam-version 3 1 17 ] || ( [ os.name ] != NT )
+ {
+ # Prior to version 3.1.17 Boost Jam's SHELL command did not support
+ # quoted commands correctly on Windows. This means that on that
+ # platform we do not support using a Python command interpreter
+ # executable whose path contains a space character.
+ python-cmd = \"$(python-cmd)\" ;
+ }
+ local full-cmd =
+ $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ;
+
+ local output = [ shell-cmd $(full-cmd) ] ;
+ if $(output)
+ {
+ # Parse the output to get all the results.
+ local nl = "
+
+" ;
+ for s in $(sys-elements)
+ {
+ # These variables are expected to be declared local in the
+ # caller, so Jam's dynamic scoping will set their values there.
+ sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ;
+ }
+ }
+ return $(output) ;
+ }
+}
+
+
+# Make sure the "libraries" and "includes" variables (in an enclosing scope)
+# have a value based on the information given.
+#
+local rule compute-default-paths ( target-os : version ? : prefix ? :
+ exec-prefix ? )
+{
+ exec-prefix ?= $(prefix) ;
+
+ if $(target-os) = windows
+ {
+ # The exec_prefix is where you're supposed to look for machine-specific
+ # libraries.
+ local default-library-path = $(exec-prefix)\\libs ;
+ local default-include-path = $(:E=Include:R=$(prefix)) ;
+
+ # If the interpreter was found in a directory called "PCBuild" or
+ # "PCBuild8," assume we're looking at a Python built from the source
+ # distro, and go up one additional level to the default root. Otherwise,
+ # the default root is the directory where the interpreter was found.
+
+ # We ask Python itself what the executable path is in case of
+ # intermediate symlinks or shell scripts.
+ local executable-dir = $(sys.executable:D) ;
+
+ if [ MATCH ^(PCBuild) : $(executable-dir:D=) ]
+ {
+ debug-message "This Python appears to reside in a source distribution;" ;
+ debug-message "prepending \""$(executable-dir)"\" to default library search path" ;
+
+ default-library-path = $(executable-dir) $(default-library-path) ;
+
+ default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ;
+
+ debug-message "and \""$(default-include-path[1])"\" to default #include path" ;
+ }
+
+ libraries ?= $(default-library-path) ;
+ includes ?= $(default-include-path) ;
+ }
+ else
+ {
+ includes ?= $(prefix)/include/python$(version) ;
+
+ local lib = $(exec-prefix)/lib ;
+ libraries ?= $(lib)/python$(version)/config $(lib) ;
+ }
+}
+
+# The version of the python interpreter to use.
+feature.feature python : : propagated ;
+feature.feature python.interpreter : : free ;
+
+toolset.flags python.capture-output PYTHON : <python.interpreter> ;
+
+#
+# Support for Python configured --with-pydebug
+#
+feature.feature python-debugging : off on : propagated ;
+builtin.variant debug-python : debug : <python-debugging>on ;
+
+
+# Return a list of candidate commands to try when looking for a Python
+# interpreter. prefix is expected to be a native path.
+#
+local rule candidate-interpreters ( version ? : prefix ? : target-os )
+{
+ local bin-path = bin ;
+ if $(target-os) = windows
+ {
+ # On Windows, look in the root directory itself and, to work with the
+ # result of a build-from-source, the PCBuild directory.
+ bin-path = PCBuild8 PCBuild "" ;
+ }
+
+ bin-path = $(bin-path:R=$(prefix)) ;
+
+ if $(target-os) in windows darwin
+ {
+ return # Search:
+ $(:E=python:R=$(bin-path)) # Relative to the prefix, if any
+ python # In the PATH
+ [ $(target-os)-installed-pythons $(version) ] # Standard install locations
+ ;
+ }
+ else
+ {
+ # Search relative to the prefix, or if none supplied, in PATH.
+ local unversioned = $(:E=python:R=$(bin-path:E=)) ;
+
+ # If a version was specified, look for a python with that specific
+ # version appended before looking for one called, simply, "python"
+ return $(unversioned)$(version) $(unversioned) ;
+ }
+}
+
+
+# Compute system library dependencies for targets linking with static Python
+# libraries.
+#
+# On many systems, Python uses libraries such as pthreads or libdl. Since static
+# libraries carry no library dependency information of their own that the linker
+# can extract, these extra dependencies have to be given explicitly on the link
+# line of the client. The information about these dependencies is packaged into
+# the "python" target below.
+#
+# Even where Python itself uses pthreads, it never allows extension modules to
+# be entered concurrently (unless they explicitly give up the interpreter lock).
+# Therefore, extension modules do not need the efficiency overhead of threadsafe
+# code as produced by <threading>multi, and we handle libpthread along with
+# other libraries here. Note: this optimization is based on an assumption that
+# the compiler generates link-compatible code in both the single- and
+# multi-threaded cases, and that system libraries do not change their ABIs
+# either.
+#
+# Returns a list of usage-requirements that link to the necessary system
+# libraries.
+#
+local rule system-library-dependencies ( target-os )
+{
+ switch $(target-os)
+ {
+ case s[uo][nl]* : # solaris, sun, sunos
+ # Add a librt dependency for the gcc toolset on SunOS (the sun
+ # toolset adds -lrt unconditionally). While this appears to
+ # duplicate the logic already in gcc.jam, it does not as long as
+ # we are not forcing <threading>multi.
+
+ # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields
+ # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem
+ # to be the right list for extension modules. For example, on my
+ # installation, adding -ldl causes at least one test to fail because
+ # the library can not be found and removing it causes no failures.
+
+ # Apparently, though, we need to add -lrt for gcc.
+ return <toolset>gcc:<library>rt ;
+
+ case osf : return <library>pthread <toolset>gcc:<library>rt ;
+
+ case qnx* : return ;
+ case darwin : return ;
+ case windows : return ;
+
+ case hpux : return <library>rt ;
+ case *bsd : return <library>pthread <toolset>gcc:<library>util ;
+
+ case aix : return <library>pthread <library>dl ;
+
+ case * : return <library>pthread <library>dl
+ <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ;
+ }
+}
+
+
+# Declare a target to represent Python's library.
+#
+local rule declare-libpython-target ( version ? : requirements * )
+{
+ # Compute the representation of Python version in the name of Python's
+ # library file.
+ local lib-version = $(version) ;
+ if <target-os>windows in $(requirements)
+ {
+ local major-minor = [ split-version $(version) ] ;
+ lib-version = $(major-minor:J="") ;
+ if <python-debugging>on in $(requirements)
+ {
+ lib-version = $(lib-version)_d ;
+ }
+ }
+
+ if ! $(lib-version)
+ {
+ ECHO *** warning: could not determine Python version, which will ;
+ ECHO *** warning: probably prevent us from linking with the python ;
+ ECHO *** warning: library. Consider explicitly passing the version ;
+ ECHO *** warning: to 'using python'. ;
+ }
+
+ # Declare it.
+ lib python.lib : : <name>python$(lib-version) $(requirements) ;
+}
+
+
+# Implementation of init.
+local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
+ condition * : extension-suffix ? )
+{
+ local prefix ;
+ local exec-prefix ;
+ local cmds-to-try ;
+ local interpreter-cmd ;
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+
+ if $(target-os) = windows && <python-debugging>on in $(condition)
+ {
+ extension-suffix ?= _d ;
+ }
+ extension-suffix ?= "" ;
+
+ # Normalize and dissect any version number.
+ local major-minor ;
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+
+ local cmds-to-try ;
+
+ if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ]
+ {
+ # If the user did not pass a command, whatever we got was a prefix.
+ prefix = $(cmd-or-prefix) ;
+ cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ;
+ }
+ else
+ {
+ # Work with the command the user gave us.
+ cmds-to-try = $(cmd-or-prefix) ;
+
+ # On Windows, do not nail down the interpreter command just yet in case
+ # the user specified something that turns out to be a cygwin symlink,
+ # which could bring down bjam if we invoke it.
+ if $(target-os) != windows
+ {
+ interpreter-cmd = $(cmd-or-prefix) ;
+ }
+ }
+
+ # Values to use in case we can not really find anything in the system.
+ local fallback-cmd = $(cmds-to-try[1]) ;
+ local fallback-version ;
+
+ # Anything left to find or check?
+ if ! ( $(interpreter-cmd) && $(includes) && $(libraries) )
+ {
+ # Values to be extracted from python's sys module. These will be set by
+ # the probe rule, above, using Jam's dynamic scoping.
+ local sys-elements = version platform prefix exec_prefix executable ;
+ local sys.$(sys-elements) ;
+
+ # Compute the string Python's sys.platform needs to match. If not
+ # targeting Windows or cygwin we will assume only native builds can
+ # possibly run, so we will not require a match and we leave sys.platform
+ # blank.
+ local platform ;
+ switch $(target-os)
+ {
+ case windows : platform = win32 ;
+ case cygwin : platform = cygwin ;
+ }
+
+ while $(cmds-to-try)
+ {
+ # Pop top command.
+ local cmd = $(cmds-to-try[1]) ;
+ cmds-to-try = $(cmds-to-try[2-]) ;
+
+ debug-message Checking interpreter command \"$(cmd)\"... ;
+ if [ probe $(cmd) ]
+ {
+ fallback-version ?= $(sys.version) ;
+
+ # Check for version/platform validity.
+ for local x in version platform
+ {
+ if $($(x)) && $($(x)) != $(sys.$(x))
+ {
+ debug-message ...$(x) "mismatch (looking for"
+ $($(x)) but found $(sys.$(x))")" ;
+ cmd = ;
+ }
+ }
+
+ if $(cmd)
+ {
+ debug-message ...requested configuration matched! ;
+
+ exec-prefix = $(sys.exec_prefix) ;
+
+ compute-default-paths $(target-os) : $(sys.version) :
+ $(sys.prefix) : $(sys.exec_prefix) ;
+
+ version = $(sys.version) ;
+ interpreter-cmd ?= $(cmd) ;
+ cmds-to-try = ; # All done.
+ }
+ }
+ else
+ {
+ debug-message ...does not invoke a working interpreter ;
+ }
+ }
+ }
+
+ # Anything left to compute?
+ if $(includes) && $(libraries)
+ {
+ .configured = true ;
+ }
+ else
+ {
+ version ?= $(fallback-version) ;
+ version ?= 2.5 ;
+ exec-prefix ?= $(prefix) ;
+ compute-default-paths $(target-os) : $(version) : $(prefix:E=) ;
+ }
+
+ if ! $(interpreter-cmd)
+ {
+ fallback-cmd ?= python ;
+ debug-message No working Python interpreter found. ;
+ if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ]
+ {
+ interpreter-cmd = $(fallback-cmd) ;
+ debug-message falling back to \"$(interpreter-cmd)\" ;
+ }
+ }
+
+ includes = [ path-to-native $(includes) ] ;
+ libraries = [ path-to-native $(libraries) ] ;
+
+ debug-message "Details of this Python configuration:" ;
+ debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ;
+ debug-message " include path:" \"$(includes:E=<empty>)\" ;
+ debug-message " library path:" \"$(libraries:E=<empty>)\" ;
+ if $(target-os) = windows
+ {
+ debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ;
+ }
+
+ #
+ # End autoconfiguration sequence.
+ #
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values python ]
+ {
+ feature.extend python : $(version) ;
+ }
+ target-requirements += <python>$(version:E=default) ;
+ }
+
+ target-requirements += <target-os>$(target-os) ;
+
+ # See if we can find a framework directory on darwin.
+ local framework-directory ;
+ if $(target-os) = darwin
+ {
+ # Search upward for the framework directory.
+ local framework-directory = $(libraries[-1]) ;
+ while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
+ {
+ framework-directory = $(framework-directory:D) ;
+ }
+
+ if $(framework-directory:D=) = Python.framework
+ {
+ debug-message framework directory is \"$(framework-directory)\" ;
+ }
+ else
+ {
+ debug-message "no framework directory found; using library path" ;
+ framework-directory = ;
+ }
+ }
+
+ local dll-path = $(libraries) ;
+
+ # Make sure that we can find the Python DLL on Windows.
+ if ( $(target-os) = windows ) && $(exec-prefix)
+ {
+ dll-path += $(exec-prefix) ;
+ }
+
+ #
+ # Prepare usage requirements.
+ #
+ local usage-requirements = [ system-library-dependencies $(target-os) ] ;
+ usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ;
+ if <python-debugging>on in $(condition)
+ {
+ if $(target-os) = windows
+ {
+ # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define
+ # Py_DEBUG we will get multiple definition warnings.
+ usage-requirements += <define>_DEBUG ;
+ }
+ else
+ {
+ usage-requirements += <define>Py_DEBUG ;
+ }
+ }
+
+ # Global, but conditional, requirements to give access to the interpreter
+ # for general utilities, like other toolsets, that run Python scripts.
+ toolset.add-requirements
+ $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ;
+
+ # Register the right suffix for extensions.
+ register-extension-suffix $(extension-suffix) : $(target-requirements) ;
+
+ #
+ # Declare the "python" target. This should really be called
+ # python_for_embedding.
+ #
+
+ if $(framework-directory)
+ {
+ alias python
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <framework>$(framework-directory)
+ ;
+ }
+ else
+ {
+ declare-libpython-target $(version) : $(target-requirements) ;
+
+        # This is an evil hack. On Windows, when Python is embedded, nothing
+ # seems to set up sys.path to include Python's standard library
+ # (http://article.gmane.org/gmane.comp.python.general/544986). The evil
+ # here, aside from the workaround necessitated by Python's bug, is that:
+ #
+ # a. we're guessing the location of the python standard library from the
+ # location of pythonXX.lib
+ #
+ # b. we're hijacking the <testing.launcher> property to get the
+ # environment variable set up, and the user may want to use it for
+ # something else (e.g. launch the debugger).
+ local set-PYTHONPATH ;
+ if $(target-os) = windows
+ {
+ set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH :
+ $(libraries:D)/Lib ] ;
+ }
+
+ alias python
+ :
+ : $(target-requirements)
+ :
+ # Why python.lib must be listed here instead of along with the
+ # system libs is a mystery, but if we do not do it, on cygwin,
+ # -lpythonX.Y never appears in the command line (although it does on
+ # linux).
+ : $(usage-requirements)
+ <testing.launcher>$(set-PYTHONPATH)
+ <library-path>$(libraries) <dll-path>$(dll-path) <library>python.lib
+ ;
+ }
+
+ # On *nix, we do not want to link either Boost.Python or Python extensions
+ # to libpython, because the Python interpreter itself provides all those
+ # symbols. If we linked to libpython, we would get duplicate symbols. So
+ # declare two targets -- one for building extensions and another for
+ # embedding.
+ #
+ # Unlike most *nix systems, Mac OS X's linker does not permit undefined
+ # symbols when linking a shared library. So, we still need to link against
+ # the Python framework, even when building extensions. Note that framework
+ # builds of Python always use shared libraries, so we do not need to worry
+ # about duplicate Python symbols.
+ if $(target-os) in windows cygwin darwin
+ {
+ alias python_for_extensions : python : $(target-requirements) ;
+ }
+ # On AIX we need Python extensions and Boost.Python to import symbols from
+ # the Python interpreter. Dynamic libraries opened with dlopen() do not
+ # inherit the symbols from the Python interpreter.
+ else if $(target-os) = aix
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp
+ ;
+ }
+ else
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements)
+ ;
+ }
+}
+
+
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+
+type.register PYTHON_EXTENSION : : SHARED_LIB ;
+
+
+local rule register-extension-suffix ( root : condition * )
+{
+ local suffix ;
+
+ switch [ feature.get-values target-os : $(condition) ]
+ {
+ case windows : suffix = pyd ;
+ case cygwin : suffix = dll ;
+ case hpux :
+ {
+ if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
+ {
+ suffix = sl ;
+ }
+ else
+ {
+ suffix = so ;
+ }
+ }
+ case * : suffix = so ;
+ }
+
+ type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
+}
+
+
+# Unset 'lib' prefix for PYTHON_EXTENSION
+type.set-generated-target-prefix PYTHON_EXTENSION : : "" ;
+
+
+rule python-extension ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if [ configured ]
+ {
+ requirements += <use>/python//python_for_extensions ;
+ }
+ requirements += <suppress-import-lib>true ;
+
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
+
+IMPORT python : python-extension : : python-extension ;
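+
+# A minimal usage sketch (illustrative; 'hello_ext' and hello.cpp are
+# hypothetical names) showing how a project Jamfile could declare an extension
+# with the rule above:
+#
+#   python-extension hello_ext : hello.cpp /boost/python//boost_python ;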
+
+rule py2to3
+{
+ common.copy $(>) $(<) ;
+ 2to3 $(<) ;
+}
+
+actions 2to3
+{
+ 2to3 -wn "$(<)"
+ 2to3 -dwn "$(<)"
+}
+
+
+# Support for testing.
+type.register PY : py ;
+type.register RUN_PYD_OUTPUT ;
+type.register RUN_PYD : : TEST ;
+
+
+class python-test-generator : generator
+{
+ import set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local pyversion = [ $(property-set).get <python> ] ;
+ local python ;
+ local other-pythons ;
+
+        # Make a new target that converts a Python source file with 2to3 when running under Python 3.
+ local rule make-2to3-source ( source )
+ {
+ if $(pyversion) >= 3.0
+ {
+ local a = [ new action $(source) : python.py2to3 : $(property-set) ] ;
+ local t = [ utility.basename [ $(s).name ] ] ;
+ local p = [ new file-target $(t) : PY : $(project) : $(a) ] ;
+ return $(p) ;
+ }
+ else
+ {
+ return $(source) ;
+ }
+ }
+
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PY
+ {
+ if ! $(python)
+ {
+ # First Python source ends up on command line.
+ python = [ make-2to3-source $(s) ] ;
+
+ }
+ else
+ {
+ # Other Python sources become dependencies.
+ other-pythons += [ make-2to3-source $(s) ] ;
+ }
+ }
+ }
+
+ local extensions ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PYTHON_EXTENSION
+ {
+ extensions += $(s) ;
+ }
+ }
+
+ local libs ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] LIB ]
+ && ! $(s) in $(extensions)
+ {
+ libs += $(s) ;
+ }
+ }
+
+ local new-sources ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] CPP ]
+ {
+ local name = [ utility.basename [ $(s).name ] ] ;
+ if $(name) = [ utility.basename [ $(python).name ] ]
+ {
+ name = $(name)_ext ;
+ }
+ local extension = [ generators.construct $(project) $(name) :
+ PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
+
+                # The important parts of the usage requirements returned by the
+                # PYTHON_EXTENSION generator are the xdll-path properties that
+                # will allow us to find the python extension at runtime.
+ property-set = [ $(property-set).add $(extension[1]) ] ;
+
+ # Ignore usage requirements. We're a top-level generator and
+ # nobody is going to use what we generate.
+ new-sources += $(extension[2-]) ;
+ }
+ }
+
+ property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
+
+ result = [ construct-result $(python) $(extensions) $(new-sources) :
+ $(project) $(name) : $(property-set) ] ;
+ }
+}
+
+
+generators.register
+ [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_PYD_OUTPUT : RUN_PYD ;
+
+
+# There are two different ways of spelling OS names. One is used for [ os.name ]
+# and the other is used for the <host-os> and <target-os> properties. Until that
+# is remedied, this sets up a crude mapping from the latter to the former, that
+# will work *for the purposes of cygwin/NT cross-builds only*. Could not think
+# of a better name than "translate".
+#
+.translate-os-windows = NT ;
+.translate-os-cygwin = CYGWIN ;
+local rule translate-os ( src-os )
+{
+ local x = $(.translate-os-$(src-os)) [ os.name ] ;
+ return $(x[1]) ;
+}
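+
+# For example (illustrative): [ translate-os windows ] yields NT and
+# [ translate-os cygwin ] yields CYGWIN; for any other value the rule simply
+# falls back to the host's [ os.name ].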
+
+
+# Extract the path to a single ".pyd" source. This is used to build the
+# PYTHONPATH for running bpl tests.
+#
+local rule pyd-pythonpath ( source )
+{
+ return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
+}
+
+
+# The flag settings on testing.capture-output do not apply to
+# python.capture-output at the moment. Redo this explicitly.
+toolset.flags python.capture-output ARGS <testing.arg> ;
+
+
+rule capture-output ( target : sources * : properties * )
+{
+    # Set up a proper DLL search path. Here, $(sources[1]) is a python module
+ # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to
+ # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not
+ # consulted. Move it over explicitly.
+ RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
+
+ PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
+ PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
+
+ # After test is run, we remove the Python module, but not the Python script.
+ testing.capture-output $(target) : $(sources[1]) : $(properties) :
+ $(sources[2-]) ;
+
+ # PYTHONPATH is different; it will be interpreted by whichever Python is
+ # invoked and so must follow path rules for the target os. The only OSes
+ # where we can run python for other OSes currently are NT and CYGWIN so we
+ # only need to handle those cases.
+ local target-os = [ feature.get-values target-os : $(properties) ] ;
+ # Oddly, host-os is not in properties, so grab the default value.
+ local host-os = [ feature.defaults host-os ] ;
+ host-os = $(host-os:G=) ;
+ if $(target-os) != $(host-os)
+ {
+ PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path :
+ $(PYTHONPATH) ] ;
+ }
+ local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
+ local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
+ $(PYTHONPATH:J=$(path-separator)) ] ;
+ LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
+}
+
+
+rule bpl-test ( name : sources * : requirements * )
+{
+ local s ;
+ sources ?= $(name).py $(name).cpp ;
+ return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python
+ : $(requirements) : $(name) ] ;
+}
+
+
+IMPORT $(__name__) : bpl-test : : bpl-test ;
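+
+# Usage sketch (illustrative; 'my_test' is a hypothetical name). With no
+# explicit sources, the rule above picks up my_test.py and my_test.cpp:
+#
+#   bpl-test my_test ;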
diff --git a/jam-files/boost-build/tools/qcc.jam b/jam-files/boost-build/tools/qcc.jam
new file mode 100644
index 000000000..4f2a4fc14
--- /dev/null
+++ b/jam-files/boost-build/tools/qcc.jam
@@ -0,0 +1,236 @@
+# Copyright (c) 2001 David Abrahams.
+# Copyright (c) 2002-2003 Rene Rivera.
+# Copyright (c) 2002-2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import property ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+
+feature.extend toolset : qcc ;
+
+toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
+generators.override builtin.lib-generator : qcc.prebuilt ;
+toolset.inherit-flags qcc : unix ;
+toolset.inherit-rules qcc : unix ;
+
+# Initializes the qcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one being specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
+# default.
+#
+# Example:
+# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters qcc : version $(version) ] ;
+ local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
+ common.handle-options qcc : $(condition) : $(command) : $(options) ;
+}
+
+
+generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
+
+
+# Declare flags for compilation.
+toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
+
+# Declare flags and action for compilation.
+toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
+
+toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
+toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
+
+toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
+
+toolset.flags qcc.compile OPTIONS <cflags> ;
+toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
+toolset.flags qcc.compile DEFINES <define> ;
+toolset.flags qcc.compile INCLUDES <include> ;
+
+toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
+
+toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+rule compile.c++
+{
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(1) = 128 ;
+ }
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+
+# A generator class checking that we do not try to use the <runtime-link>static
+# property while creating or using a shared library, since that is not supported
+# by qcc/libc.
+#
+class qcc-linking-generator : unix-linking-generator
+{
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if <runtime-link>static in [ $(property-set).raw ]
+ {
+ local m ;
+ if [ id ] = "qcc.link.dll"
+ {
+                m = "on qcc, DLL can't be built with <runtime-link>static" ;
+ }
+ if ! $(m)
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+                        m = "on qcc, using DLLs together with the <runtime-link>static option is not possible" ;
+ }
+ }
+ }
+ if $(m)
+ {
+ errors.user-error $(m) : "It is suggested to use"
+ "<runtime-link>static together with <link>static." ;
+ }
+ }
+
+ return [ unix-linking-generator.generated-targets
+ $(sources) : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
+ : <toolset>qcc ] ;
+
+generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
+ : SHARED_LIB : <toolset>qcc ] ;
+
+generators.override qcc.prebuilt : builtin.prebuilt ;
+generators.override qcc.searched-lib-generator : searched-lib-generator ;
+
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
+toolset.flags qcc.link OPTIONS <profiling>on : -p ;
+toolset.flags qcc.link OPTIONS <linkflags> ;
+toolset.flags qcc.link LINKPATH <library-path> ;
+toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags qcc.link LIBRARIES <library-file> ;
+
+toolset.flags qcc.link FINDLIBS-SA : m ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link.
+toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
+
+# Assuming this is just like with gcc.
+toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
+toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
+
+
+# Declare actions for linking.
+#
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only qcc links while it might
+ # be a good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
+
+
+# Always remove archive and start again. Here is the rationale from Andre Hentz:
+# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
+# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
+# After some debugging I traced it back to the fact that a1.o was *still* in
+# liba.a
+RM = [ common.rm-command ] ;
+if [ os.name ] = NT
+{
+ RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
+}
+
+
+# Declare action for creating static libraries. The 'r' letter means to add
+# files to the archive with replacement. Since we remove the archive, we do not
+# care about replacement, but there is no option to "add without replacement".
+# The 'c' letter suppresses warnings in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reasons.
+#
+actions piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc "$(<)" "$(>)"
+}
+
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+
+# Differ from 'link' above only by -shared.
+#
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
diff --git a/jam-files/boost-build/tools/qt.jam b/jam-files/boost-build/tools/qt.jam
new file mode 100644
index 000000000..8aa7ca266
--- /dev/null
+++ b/jam-files/boost-build/tools/qt.jam
@@ -0,0 +1,17 @@
+# Copyright (c) 2006 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Forwarding toolset file for the Qt GUI library. Forwards to the toolset file
+# for the current version of Qt.
+
+import qt4 ;
+
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ;
+}
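+
+# Usage sketch (illustrative prefix), forwarding to qt4.init with only a prefix:
+#
+#   using qt : /usr/local/Trolltech/Qt-4.4.3 ;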
+
+
diff --git a/jam-files/boost-build/tools/qt3.jam b/jam-files/boost-build/tools/qt3.jam
new file mode 100644
index 000000000..f82cf0ac3
--- /dev/null
+++ b/jam-files/boost-build/tools/qt3.jam
@@ -0,0 +1,209 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for the Qt GUI library version 3
+# (http://www.trolltech.com/products/qt3/index.html).
+# For new developments, it is recommended to use Qt4 via the qt4 Boost.Build
+# module.
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+
+# Convert this module into a project, so that we can declare targets here.
+project.initialize $(__name__) ;
+project qt3 ;
+
+
+# Initializes the QT support module. The 'prefix' parameter tells where QT is
+# installed. When not given, the environment variable QTDIR should be set.
+#
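+# For example (illustrative prefix):
+#
+#   using qt3 : /usr/share/qt3 ;
+#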
+rule init ( prefix ? )
+{
+ if ! $(prefix)
+ {
+ prefix = [ modules.peek : QTDIR ] ;
+ if ! $(prefix)
+ {
+ errors.error
+ "QT installation prefix not given and QTDIR variable is empty" ;
+ }
+ }
+
+ if $(.initialized)
+ {
+ if $(prefix) != $(.prefix)
+ {
+ errors.error
+                "Attempt to reinitialize QT with a different installation prefix" ;
+ }
+ }
+ else
+ {
+ .initialized = true ;
+ .prefix = $(prefix) ;
+
+ generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ;
+ # Note: the OBJ target type here is fake, take a look at
+ # qt4.jam/uic-h-generator for explanations that apply in this case as
+ # well.
+ generators.register [ new moc-h-generator-qt3
+ qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ;
+
+ # The UI type is defined in types/qt.jam, and UIC_H is only used in
+ # qt.jam, but not in qt4.jam, so define it here.
+ type.register UIC_H : : H ;
+
+ generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ;
+
+ # The following generator is used to convert UI files to CPP. It creates
+ # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also
+ # returns UIC_H target, so that it can be mocced.
+ class qt::uic-cpp-generator : generator
+ {
+ rule __init__ ( )
+ {
+ generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ;
+ }
+
+ rule run ( project name ? : properties * : sources + )
+ {
+ # Consider this:
+ # obj test : test_a.cpp : <optimization>off ;
+ #
+            # This generator will somehow be called in this case, and will
+            # fail -- which is okay. However, if there are <library>
+            # properties, they will be converted to sources, so the size of
+ # 'sources' will be more than 1. In this case, the base generator
+ # will just crash -- and that's not good. Just use a quick test
+ # here.
+
+ local result ;
+ if ! $(sources[2])
+ {
+ # Construct CPP as usual
+ result = [ generator.run $(project) $(name)
+ : $(properties) : $(sources) ] ;
+
+ # If OK, process UIC_H with moc. It's pretty clear that
+ # the object generated with UIC will have Q_OBJECT macro.
+ if $(result)
+ {
+ local action = [ $(result[1]).action ] ;
+ local sources = [ $(action).sources ] ;
+ local mocced = [ generators.construct $(project) $(name)
+ : CPP : $(properties) : $(sources[2]) ] ;
+ result += $(mocced[2-]) ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+
+ generators.register [ new qt::uic-cpp-generator ] ;
+
+ # Finally, declare prebuilt target for QT library.
+ local usage-requirements =
+ <include>$(.prefix)/include
+ <dll-path>$(.prefix)/lib
+ <library-path>$(.prefix)/lib
+ <allow>qt3
+ ;
+ lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ;
+ lib qt : : <name>qt <threading>single : : $(usage-requirements) ;
+ }
+}
+
+class moc-h-generator-qt3 : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+ name = [ $(sources[1]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt3.moc.cpp :
+ $(property-set) ] ;
+
+ local target = [
+ new file-target $(name) : MOC : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that the dependency from
+ # sources to this generated header is detected -- if Jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.prefix) ;
+}
+
+# -f forces moc to include the processed source file. Without it, it would think
+# that .qpp is not a header and would not include it from the generated file.
+#
+actions moc
+{
+ $(.prefix)/bin/moc -f $(>) -o $(<)
+}
+
+# When moccing .cpp files, we don't need -f, otherwise generated code will
+# include .cpp and we'll get duplicated symbols.
+#
+actions moc.cpp
+{
+ $(.prefix)/bin/moc $(>) -o $(<)
+}
+
+
+space = " " ;
+
+# Sometimes it's required to make 'plugins' available during uic invocation. To
+# help with this we add the paths of all dependency libraries to the uic command
+# line. The intention is that it's possible to write
+#
+# exe a : ... a.ui ... : <uses>some_plugin ;
+#
+# and have everything work. We may add quite a few unrelated paths, but that
+# won't hurt.
+#
+flags qt3.uic-h LIBRARY_PATH <xdll-path> ;
+actions uic-h
+{
+ $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH)
+}
+
+
+flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ;
+# The second source is the uic-generated header name. It is placed in the build
+# directory, but we want to include it using only its basename.
+actions uic-cpp
+{
+ $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH)
+}
diff --git a/jam-files/boost-build/tools/qt4.jam b/jam-files/boost-build/tools/qt4.jam
new file mode 100644
index 000000000..771b9344f
--- /dev/null
+++ b/jam-files/boost-build/tools/qt4.jam
@@ -0,0 +1,713 @@
+# Copyright 2002-2006 Vladimir Prus
+# Copyright 2005 Alo Sarv
+# Copyright 2005-2009 Juergen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Qt4 library support module
+#
+# The module attempts to auto-detect QT installation location from QTDIR
+# environment variable; failing that, installation location can be passed as
+# argument:
+#
+# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
+#
+# The module supports code generation from .ui and .qrc files, as well as
+# running the moc preprocessor on headers. Note that you must list all your
+# moc-able headers in sources.
+#
+# Example:
+#
+# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
+# /qt4//QtGui /qt4//QtNetwork ;
+#
+# It's also possible to run moc on cpp sources:
+#
+# import cast ;
+#
+# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
+#
+# When moccing source file myapp.cpp you need to include "myapp.moc" from
+# myapp.cpp. When moccing .h files, the output of moc will be automatically
+# compiled and linked in, you don't need any includes.
+#
+# This is consistent with Qt guidelines:
+# http://doc.trolltech.com/4.0/moc.html
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+
+# Qt3Support control feature
+#
+# Qt4's configure builds the Qt4 libraries with Qt3Support by default.
+# The autodetection is missing, so we disable Qt3Support by default.
+# This prevents the user from inadvertently using a deprecated API.
+#
+# The Qt3Support library can be activated by adding
+# "<qt3support>on" to the requirements.
+#
+# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
+# to get warnings about deprecated Qt3 support functions and classes.
+# Files ported by the "qt3to4" conversion tool contain _tons_ of
+# warnings, so this define is not set by default.
+#
+# Todo: Detect Qt3Support from Qt's configure data.
+# Or add more auto-configuration (like python).
+feature.feature qt3support : off on : propagated link-incompatible ;
+
+# The Qt version used for requirements
+# Valid are <qt>4.4 or <qt>4.5.0
+# Auto-detection via qmake sets '<qt>major.minor.patch'
+feature.feature qt : : propagated ;
+
+project.initialize $(__name__) ;
+project qt ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+
+# Helper utils for easy debug output
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = TRUE ;
+}
+
+local rule debug-message ( message * )
+{
+ if $(.debug-configuration) = TRUE
+ {
+ ECHO notice: [qt4-cfg] $(message) ;
+ }
+}
+
+# Capture qmake output line by line
+local rule read-output ( content )
+{
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ return $(lines) ;
+}
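+
+# For example (illustrative): the two-line 'qmake -v' output "QMake version
+# 2.01a" / "Using Qt version 4.5.2" comes back from read-output as a two-element
+# list, one element per line.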
+
+# Capture Qt version from qmake
+local rule check-version ( bin_prefix )
+{
+ full-cmd = $(bin_prefix)"/qmake -v" ;
+ debug-message Running '$(full-cmd)' ;
+ local output = [ SHELL $(full-cmd) ] ;
+ for line in [ read-output $(output) ]
+ {
+ # Parse the output to get all the results.
+ if [ MATCH "QMake" : $(line) ]
+ {
+ # Skip first line of output
+ }
+ else
+ {
+ temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
+ }
+ }
+ return $(temp) ;
+}
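+
+# For example (illustrative prefix): with a Qt 4.5.2 installation,
+# [ check-version /usr/local/Trolltech/Qt-4.5.2/bin ] returns the three-element
+# list 4 5 2.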
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
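+
+# For example (illustrative): [ split-version 4.5 ] returns 4 5, while a
+# three-part value such as 4.5.2 triggers the warning above and still returns
+# 4 5.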
+
+# Initialize the QT support module.
+# Parameters:
+# - 'prefix' parameter tells where Qt is installed.
+# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
+# - 'full_inc' optional full path to Qt top-level include directory
+# - 'full_lib' optional full path to Qt library directory
+# - 'version' optional version of Qt, else autodetected via 'qmake -v'
+# - 'condition' optional requirements
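+#
+# Example (illustrative prefix and version; the empty arguments skip the
+# optional full_bin, full_inc and full_lib paths):
+#
+#   using qt4 : /usr/local/Trolltech/Qt-4.5.2 : : : : 4.5 ;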
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ project.push-current $(.project) ;
+
+ debug-message "==== Configuring Qt ... ====" ;
+    for local v in prefix full_bin full_inc full_lib version condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): '$($(v))' ;
+ }
+ }
+
+ # Needed as default value
+ .prefix = $(prefix) ;
+
+    # Pre-build the paths so that reinitialization changes can be detected.
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ # Globally needed variables
+ .incprefix = $(inc_prefix) ;
+ .libprefix = $(lib_prefix) ;
+ .binprefix = $(bin_prefix) ;
+
+ if ! $(.initialized)
+ {
+ # Make sure this is initialised only once
+ .initialized = true ;
+
+ # Generates cpp files from header files using "moc" tool
+ generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
+
+        # The OBJ result type is a fake; 'H' will really be produced. See
+ # comments on the generator class, defined below the 'init' function.
+ generators.register [ new uic-generator qt4.uic : UI : OBJ :
+ <allow>qt4 ] ;
+
+ # The OBJ result type is a fake here too.
+ generators.register [ new moc-h-generator
+ qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
+
+ generators.register [ new moc-inc-generator
+ qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
+
+ # Generates .cpp files from .qrc files.
+ generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ;
+
+ # dependency scanner for wrapped files.
+ type.set-scanner QRC : qrc-scanner ;
+
+        # Save the value of the first occurring prefix.
+ .PREFIX = $(prefix) ;
+ }
+
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+ else
+ {
+ version = [ check-version $(bin_prefix) ] ;
+ if $(version)
+ {
+ version = $(version:J=.) ;
+ }
+ debug-message Detected version '$(version)' ;
+ }
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values qt ]
+ {
+ feature.extend qt : $(version) ;
+ }
+ target-requirements += <qt>$(version:E=default) ;
+ }
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ if ! $(target-os)
+ {
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+ target-requirements += <target-os>$(target-os) ;
+ }
+
+ # Build exact requirements for the tools
+ local tools-requirements = $(target-requirements:J=/) ;
+
+ debug-message "Details of this Qt configuration:" ;
+ debug-message " prefix: " '$(prefix:E=<empty>)' ;
+ debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
+ debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
+ debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
+ debug-message " target requirements:" '$(target-requirements)' ;
+ debug-message " tool requirements: " '$(tools-requirements)' ;
+
+ # setup the paths for the tools
+ toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+
+ # TODO: 2009-02-12: Better support for directories
+ # Most likely needed are separate getters for: include,libraries,binaries and sources.
+ toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
+
+ # Test for a buildable Qt.
+ if [ glob $(.prefix)/Jamroot ]
+ {
+        .bjam-qt = true ;
+
+ # this will declare QtCore (and qtmain on <target-os>windows)
+ add-shared-library QtCore ;
+ }
+ else
+ # Setup common pre-built Qt.
+ # Special setup for QtCore on which everything depends
+ {
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <dll-path>$(.libprefix)
+ <threading>multi
+ <allow>qt4 ;
+
+ local suffix ;
+
+ # Since Qt-4.2, debug versions on unix have to be built
+ # separately and therefore have no suffix.
+ .suffix_version = "" ;
+ .suffix_debug = "" ;
+
+ # Control flag for auto-configuration of the debug libraries.
+ # This setup requires Qt 'configure -debug-and-release'.
+ # Only available on some platforms.
+ # ToDo: 2009-02-12: Maybe throw this away and
+ # require separate setup with <variant>debug as condition.
+ .have_separate_debug = FALSE ;
+
+ # Setup other platforms
+ if $(target-os) in windows cygwin
+ {
+ .have_separate_debug = TRUE ;
+
+ # On NT, the libs have "4" suffix, and "d" suffix in debug builds.
+ .suffix_version = "4" ;
+ .suffix_debug = "d" ;
+
+ # On Windows we must link against the qtmain library
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ ;
+
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain
+ $(target-requirements)
+ ;
+ }
+ else if $(target-os) = darwin
+ {
+ # On MacOS X, both debug and release libraries are available.
+ .suffix_debug = "_debug" ;
+
+ .have_separate_debug = TRUE ;
+
+ alias qtmain ;
+ }
+ else
+ {
+ alias qtmain : : $(target-requirements) ;
+ }
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_version)
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <define>QT_NO_DEBUG
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
+
+            lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_debug)$(.suffix_version)
+ <variant>debug
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+ }
+ }
+
+    # Initialising the remaining libraries is canonical. The parameters are
+    # 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include';
+    # 'include' is only needed for non-canonical include paths.
+ add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
+ add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
+ add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
+ add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
+
+ add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
+ : QT_QT3SUPPORT_LIB QT3_SUPPORT
+ : <qt3support>on $(target-requirements) ;
+
+ # Dummy target to enable "<qt3support>off" and
+ # "<library>/qt//Qt3Support" at the same time. This enables quick
+ # switching from one to the other for test/porting purposes.
+ alias Qt3Support : : <qt3support>off $(target-requirements) ;
+
+ # OpenGl Support
+ add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
+
+ # SVG-Support (Qt 4.1)
+ add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
+
+ # Test-Support (Qt 4.1)
+ add-shared-library QtTest : QtCore : : $(target-requirements) ;
+
+ # Qt designer library
+ add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
+
+ # Support for dynamic Widgets (Qt 4.1)
+ add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
+
+ # DBus-Support (Qt 4.2)
+ add-shared-library QtDBus : QtXml : : $(target-requirements) ;
+
+ # Script-Engine (Qt 4.3)
+ add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
+
+ # Tools for the Script-Engine (Qt 4.5)
+ add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
+
+ # WebKit (Qt 4.4)
+ add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
+
+ # Phonon Multimedia (Qt 4.4)
+ add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
+
+ # Multimedia engine (Qt 4.6)
+ add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
+
+ # XmlPatterns-Engine (Qt 4.4)
+ add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
+
+ # Help-Engine (Qt 4.4)
+ add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
+
+ # AssistantClient Support
+ # Compat library
+ # Pre-4.4 help system, use QtHelp for new programs
+ add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
+
+ debug-message "==== Configured Qt-$(version) ====" ;
+
+ project.pop-current ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+
+# This custom generator is needed because in QT4, UI files are translated only
+# into H files, and no C++ files are created. Further, the H files need not be
+# passed via MOC. The header is used only via inclusion. If we define a standard
+# UI -> H generator, Boost.Build will run MOC on H, and then compile the
+# resulting cpp. It will give a warning, since output from moc will be empty.
+#
+# This generator is declared with a UI -> OBJ signature, so it gets invoked when
+# linking generator tries to convert sources to OBJ, but it produces target of
+# type H. This is non-standard, but allowed. That header won't be mocced.
+#
+class uic-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(name)
+ {
+            name = [ $(sources[1]).name ] ;
+ name = $(name:B) ;
+ }
+
+ local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
+
+ # The 'ui_' prefix is to match qmake's default behavior.
+ local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However, we
+ # need the target to be seen by bjam, so that dependency from sources to
+ # this generated header is detected -- if jam does not know about this
+ # target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+}
+
+
+class moc-h-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+            name = [ $(sources[1]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target $(name) : MOC : $(project) : $(a)
+ ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+class moc-inc-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
+ {
+            name = [ $(sources[1]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target moc_$(name) : CPP : $(project) :
+ $(a) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(target).actualize ] ;
+
+ return [ virtual-target.register $(target) ] ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.PREFIX) ;
+}
+
+# Add a shared Qt library.
+rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a static Qt library.
+rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a Qt library.
+# Static libs are unversioned, whereas shared libs have the major number as suffix.
+# Creates both release and debug versions on platforms where both are enabled by Qt configure.
+# Parameters:
+# - lib-name Qt library Name
+# - version Qt major number used as shared library suffix (QtCore4.so)
+# - depends-on other Qt libraries
+# - usage-defines those are set by qmake, so set them when using this library
+# - requirements additional requirements
+# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
+rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
+{
+ if $(.bjam-qt)
+ {
+        # Import the Qt module.
+        # Everything will be set up there.
+ alias $(lib-name)
+ : $(.prefix)//$(lib-name)
+ :
+ :
+ : <allow>qt4 ;
+ }
+ else
+ {
+ local real_include ;
+ real_include ?= $(include) ;
+ real_include ?= $(lib-name) ;
+
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(version)
+ $(requirements)
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(.suffix_debug)$(version)
+ $(requirements)
+ <variant>debug
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+ }
+ }
+
+ # Make library explicit so that a simple <use>qt4 will not bring in everything.
+ # And some components like QtDBus/Phonon may not be available on all platforms.
+ explicit $(lib-name) ;
+}
+
+# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
+# The exact match is the last one.
+
+# Get <include> and <defines> from current toolset.
+flags qt4.moc INCLUDES <include> ;
+flags qt4.moc DEFINES <define> ;
+
+# Processes headers to create Qt MetaObject information. Qt4's moc has its own
+# C++ parser, so pass INCLUDES and DEFINES.
+#
+actions moc
+{
+ $(.BINPREFIX[-1])/moc -I"$(INCLUDES)" -D$(DEFINES) -f $(>) -o $(<)
+}
+
+
+# When moccing files for include only, we don't need -f, otherwise the generated
+# code will include the .cpp and we'll get duplicated symbols.
+#
+actions moc.inc
+{
+ $(.BINPREFIX[-1])/moc -I"$(INCLUDES)" -D$(DEFINES) $(>) -o $(<)
+}
+
+
+# Generates source files from resource files.
+#
+actions rcc
+{
+ $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<)
+}
+
+
+# Generates user-interface source from .ui files.
+#
+actions uic
+{
+ $(.BINPREFIX[-1])/uic $(>) -o $(<)
+}
+
+
+# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
+# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
+# detailed documentation of the Qt Resource System.
+#
+class qrc-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<file.*>(.*)</file>" ;
+ }
+}
+
+
+# Wrapped files are "included".
+scanner.register qrc-scanner : include ;
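+
+# For example (illustrative): a .qrc entry such as
+#   <file alias="logo">images/logo.png</file>
+# matches the pattern above, so images/logo.png is registered as an include
+# dependency of the resource file.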
diff --git a/jam-files/boost-build/tools/quickbook-config.jam b/jam-files/boost-build/tools/quickbook-config.jam
new file mode 100644
index 000000000..e983a78a8
--- /dev/null
+++ b/jam-files/boost-build/tools/quickbook-config.jam
@@ -0,0 +1,44 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the QuickBook tool. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ local boost-dir = ;
+ for local R in snapshot cvs 1.33.0
+ {
+ boost-dir += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)"
+ : "InstallRoot" ] ;
+ }
+ local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ;
+ quickbook-path = $(quickbook-path[1]) ;
+
+ if $(quickbook-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using quickbook ":" $(quickbook-path) ;
+ }
+ using quickbook : $(quickbook-path) ;
+ }
+}
+else
+{
+ local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ;
+ quickbook-path = $(quickbook-path[1]) ;
+
+ if $(quickbook-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using quickbook ":" $(quickbook-path) ;
+ }
+ using quickbook : $(quickbook-path) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/quickbook.jam b/jam-files/boost-build/tools/quickbook.jam
new file mode 100644
index 000000000..6de2d42f8
--- /dev/null
+++ b/jam-files/boost-build/tools/quickbook.jam
@@ -0,0 +1,361 @@
+#
+# Copyright (c) 2005 João Abecasis
+# Copyright (c) 2005 Vladimir Prus
+# Copyright (c) 2006 Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+
+# This toolset defines a generator to translate QuickBook to BoostBook. It can
+# be used to generate nice (!) user documentation in different formats
+# (pdf/html/...), from a single text file with simple markup.
+#
+# The toolset defines the QUICKBOOK type (file extension 'qbk') and
+# a QUICKBOOK to XML (BOOSTBOOK) generator.
+#
+#
+# ===========================================================================
+# Q & A
+# ===========================================================================
+#
+# If you don't know what this is all about, some Q & A will hopefully get you
+# up to speed with QuickBook and this toolset.
+#
+#
+# What is QuickBook ?
+#
+# QuickBook is a WikiWiki style documentation tool geared towards C++
+# documentation using simple rules and markup for simple formatting tasks.
+# QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook
+# documents are simple text files. A single QuickBook document can
+# generate a fully linked set of nice HTML and PostScript/PDF documents
+# complete with images and syntax-colorized source code.
+#
+#
+# Where can I get QuickBook ?
+#
+#     QuickBook can be found in Boost's repository, under the tools/quickbook
+#     directory. It was added there in January 2005, some time after the release
+#     of Boost v1.32.0, and has been an integral part of the Boost distribution
+#     since v1.33.
+#
+# Here's a link to the SVN repository:
+# https://svn.boost.org/svn/boost/trunk/tools/quickbook
+#
+# And to QuickBook's QuickBook-generated docs:
+# http://www.boost.org/doc/libs/release/tools/quickbook/index.html
+#
+#
+# How do I use QuickBook and this toolset in my projects ?
+#
+# The minimal example is:
+#
+# using boostbook ;
+# import quickbook ;
+#
+# boostbook my_docs : my_docs_source.qbk ;
+#
+# where my_docs is a target name and my_docs_source.qbk is a QuickBook
+# file. The documentation format to be generated is determined by the
+# boostbook toolset. By default html documentation should be generated,
+# but you should check BoostBook's docs to be sure.
+#
+#
+# What do I need ?
+#
+# You should start by setting up the BoostBook toolset. Please refer to
+# boostbook.jam and the BoostBook documentation for information on how to
+# do this.
+#
+# A QuickBook executable is also needed. The toolset will generate this
+# executable if it can find the QuickBook sources. The following
+# directories will be searched:
+#
+# BOOST_ROOT/tools/quickbook/
+# BOOST_BUILD_PATH/../../quickbook/
+#
+# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables)
+#
+# If QuickBook sources are not found the toolset will then try to use
+# the shell command 'quickbook'.
+#
+#
+# How do I provide a custom QuickBook executable ?
+#
+# You may put the following in your user-config.jam or site-config.jam:
+#
+# using quickbook : /path/to/quickbook ;
+#
+# or, if 'quickbook' can be found in your PATH,
+#
+# using quickbook : quickbook ;
+#
+#
+# For convenience three alternatives are tried to get a QuickBook executable:
+#
+#   1. If the user points us to a QuickBook executable, that is used.
+#
+# 2. Otherwise, we search for the QuickBook sources and compile QuickBook
+# using the default toolset.
+#
+# 3. As a last resort, we rely on the shell for finding 'quickbook'.
+#
+
+import boostbook ;
+import "class" : new ;
+import feature ;
+import generators ;
+import toolset ;
+import type ;
+import scanner ;
+import project ;
+import targets ;
+import build-system ;
+import path ;
+import common ;
+import errors ;
+
+# The one and only QUICKBOOK type!
+type.register QUICKBOOK : qbk ;
+
+# <quickbook-binary> shell command to run QuickBook
+# <quickbook-binary-dependencies> targets to build QuickBook from sources.
+feature.feature <quickbook-binary> : : free ;
+feature.feature <quickbook-binary-dependencies> : : free dependency ;
+feature.feature <quickbook-define> : : free ;
+feature.feature <quickbook-indent> : : free ;
+feature.feature <quickbook-line-width> : : free ;
+
+
+# quickbook-binary-generator handles generation of the QuickBook executable by
+# marking it as a dependency for QuickBook docs.
+#
+# If the user supplied the QuickBook command, that will be used.
+#
+# Otherwise we search some sensible places for the QuickBook sources and compile
+# from scratch using the default toolset.
+#
+# As a last resort we rely on the shell to find 'quickbook'.
+#
+class quickbook-binary-generator : generator
+{
+ import modules path targets quickbook ;
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ quickbook.freeze-config ;
+ # QuickBook invocation command and dependencies.
+ local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ;
+ local quickbook-binary-dependencies ;
+
+ if ! $(quickbook-binary)
+ {
+ # If the QuickBook source directory was found, mark its main target
+ # as a dependency for the current project. Otherwise, try to find
+            # 'quickbook' in the user's PATH.
+ local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ;
+ if $(quickbook-dir)
+ {
+ # Get the main-target in QuickBook directory.
+ local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ;
+
+                # The first element holds the actual targets; the second holds
+                # the properties found in the target id. We do not care about
+                # the latter since we have passed the id ourselves.
+ quickbook-main-target =
+ [ $(quickbook-main-target[1]).main-target quickbook ] ;
+
+ quickbook-binary-dependencies =
+ [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ;
+
+ # Ignore usage-requirements returned as first element.
+ quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ;
+
+ # Some toolsets generate extra targets (e.g. RSP). We must mark
+ # all targets as dependencies for the project, but we will only
+ # use the EXE target for quickbook-to-boostbook translation.
+ for local target in $(quickbook-binary-dependencies)
+ {
+ if [ $(target).type ] = EXE
+ {
+ quickbook-binary =
+ [ path.native
+ [ path.join
+ [ $(target).path ]
+ [ $(target).name ]
+ ]
+ ] ;
+ }
+ }
+ }
+ }
+
+ # Add $(quickbook-binary-dependencies) as a dependency of the current
+ # project and set it as the <quickbook-binary> feature for the
+ # quickbook-to-boostbook rule, below.
+ property-set = [ $(property-set).add-raw
+ <dependency>$(quickbook-binary-dependencies)
+ <quickbook-binary>$(quickbook-binary)
+ <quickbook-binary-dependencies>$(quickbook-binary-dependencies)
+ ] ;
+
+ return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ;
+ }
+}
+
+
+# Define a scanner for tracking QBK include dependencies.
+#
+class qbk-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "\\[[ ]*include[ ]+([^]]+)\\]"
+ "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]"
+ "\\[[ ]*import[ ]+([^]]+)\\]" ;
+ }
+}
+
+
+scanner.register qbk-scanner : include ;
+
+type.set-scanner QUICKBOOK : qbk-scanner ;
+
+
+# Initialization of toolset.
+#
+# Parameters:
+# command ? -> path to QuickBook executable.
+#
+# When command is not supplied, the toolset will search for the QuickBook
+# directory and compile the executable from source. If that fails, we still
+# search the PATH for 'quickbook'.
+#
+rule init (
+ command ? # path to the QuickBook executable.
+ )
+{
+ if $(command)
+ {
+ if $(.config-frozen)
+ {
+ errors.user-error "quickbook: configuration cannot be changed after it has been used." ;
+ }
+ .command = $(command) ;
+ }
+}
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+
+ # QuickBook invocation command and dependencies.
+
+ .quickbook-binary = $(.command) ;
+
+ if $(.quickbook-binary)
+ {
+ # Use user-supplied command.
+ .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ;
+ }
+ else
+ {
+ # Search for QuickBook sources in sensible places, like
+ # $(BOOST_ROOT)/tools/quickbook
+ # $(BOOST_BUILD_PATH)/../../quickbook
+
+ # And build quickbook executable from sources.
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ local boost-build-path = [ build-system.location ] ;
+
+ if $(boost-root)
+ {
+ .quickbook-dir += [ path.join $(boost-root) tools ] ;
+ }
+
+ if $(boost-build-path)
+ {
+ .quickbook-dir += $(boost-build-path)/../.. ;
+ }
+
+ .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ;
+
+ # If the QuickBook source directory was found, mark its main target
+ # as a dependency for the current project. Otherwise, try to find
+ # 'quickbook' in the user's PATH.
+ if $(.quickbook-dir)
+ {
+ .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ;
+ }
+ else
+ {
+ ECHO "QuickBook warning: The path to the quickbook executable was" ;
+ ECHO " not provided. Additionally, couldn't find QuickBook" ;
+ ECHO " sources searching in" ;
+ ECHO " * BOOST_ROOT/tools/quickbook" ;
+ ECHO " * BOOST_BUILD_PATH/../../quickbook" ;
+ ECHO " Will now try to find a precompiled executable by searching" ;
+ ECHO " the PATH for 'quickbook'." ;
+ ECHO " To disable this warning in the future, or to completely" ;
+ ECHO " avoid compilation of quickbook, you can explicitly set the" ;
+ ECHO " path to a quickbook executable command in user-config.jam" ;
+ ECHO " or site-config.jam with the call" ;
+ ECHO " using quickbook : /path/to/quickbook ;" ;
+
+ # As a last resort, search for 'quickbook' command in path. Note
+ # that even if the 'quickbook' command is not found,
+ # get-invocation-command will still return 'quickbook' and might
+ # generate an error while generating the virtual-target.
+
+ .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ;
+ }
+ }
+ }
+}
+
+
+generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ;
+
+
+# <quickbook-binary> shell command to run QuickBook
+# <quickbook-binary-dependencies> targets to build QuickBook from sources.
+toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ;
+toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ;
+
+
+rule quickbook-to-boostbook ( target : source : properties * )
+{
+ # Signal dependency of quickbook sources on <quickbook-binary-dependencies>
+ # upon invocation of quickbook-to-boostbook.
+ DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ;
+}
+
+
+actions quickbook-to-boostbook
+{
+ "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)"
+}
+
+
+# Declare a main target to convert a quickbook source into a boostbook XML file.
+#
+rule to-boostbook ( target-name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : XML
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
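+
+
+# Example (editor's sketch, not part of the original module; names and paths
+# are illustrative). A user-config.jam entry selecting a prebuilt QuickBook
+# binary and a Jamfile converting a .qbk source with the 'to-boostbook' rule
+# declared above might look roughly like:
+#
+#   # user-config.jam
+#   using quickbook : /usr/local/bin/quickbook ;
+#
+#   # Jamfile
+#   import quickbook ;
+#   quickbook.to-boostbook mydoc : mydoc.qbk ;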
diff --git a/jam-files/boost-build/tools/rc.jam b/jam-files/boost-build/tools/rc.jam
new file mode 100644
index 000000000..9964d339b
--- /dev/null
+++ b/jam-files/boost-build/tools/rc.jam
@@ -0,0 +1,156 @@
+# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+#
+# Copyright (c) 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+import generators ;
+import feature ;
+import errors ;
+import scanner ;
+import toolset : flags ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+type.register RC : rc ;
+
+rule init ( )
+{
+}
+
+# Configures a new resource compilation command specific to a condition,
+# usually a toolset selection condition. The possible options are:
+#
+# * <rc-type>(rc|windres) - Indicates the type of options the command
+# accepts.
+#
+# Even though the arguments are all optional, the command is only configured
+# when a command, a condition and, at minimum, the rc-type option are all
+# given. This way callers do not have to check auto-configuration values
+# before calling this rule, and they still get a build failure when the
+# resource compiler cannot be found.
+#
+rule configure ( command ? : condition ? : options * )
+{
+ local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
+
+ if $(command) && $(condition) && $(rc-type)
+ {
+ flags rc.compile.resource .RC $(condition) : $(command) ;
+ flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
+ flags rc.compile.resource DEFINES <define> ;
+ flags rc.compile.resource INCLUDES <include> ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using rc compiler :: $(condition) :: $(command) ;
+ }
+ }
+}
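+
+# Example (editor's sketch, not part of the original module; the values are
+# illustrative). A toolset module that has located GNU windres could register
+# it for its own condition roughly as:
+#
+#   rc.configure windres : <toolset>gcc : <rc-type>windres ;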
+
+rule compile.resource ( target : sources * : properties * )
+{
+ local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
+ rc-type ?= null ;
+ compile.resource.$(rc-type) $(target) : $(sources[1]) ;
+}
+
+actions compile.resource.rc
+{
+ "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
+}
+
+actions compile.resource.windres
+{
+ "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
+}
+
+actions quietly compile.resource.null
+{
+ as /dev/null -o "$(<)"
+}
+
+# Since it's a common practice to write
+# exe hello : hello.cpp hello.rc
+# we change the name of object created from RC file, to
+# avoid conflict with hello.cpp.
+# The reason we generate OBJ and not RES, is that gcc does not
+# seem to like RES files, but works OK with OBJ.
+# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
+#
+# Using 'register-c-compiler' adds the build directory to INCLUDES
+generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
+
+# Register scanner for resources
+class res-scanner : scanner
+{
+ import regex virtual-target path scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
+ local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
+ local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
+
+ # Icons and other resource files may be referenced as
+ #
+ # IDR_MAINFRAME ICON "res\\icon.ico"
+ #
+ # so we have to convert the double backslashes to forward slashes.
+ res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
+
+ # CONSIDER: the new scoping rule seems to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach the binding of the including file to the included targets.
+ # When a target is created directly from a virtual target this extra
+ # information is unnecessary. But in other cases it allows us to
+ # distinguish between two headers of the same name included from
+ # different places.
+ # We do not need this extra information for angle includes, since they
+ # should not depend on the including file (we cannot get a literal "."
+ # into the include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+ res = $(res:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) ;
+
+ INCLUDES $(target) : $(all) ;
+ DEPENDS $(target) : $(res) ;
+ NOCARE $(all) $(res) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
+ SEARCH on $(res) = $(b) $(self.includes:G=) ;
+
+ # Just propagate the current scanner to the includes, in the hope
+ # that the includes do not change scanners.
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
+ }
+}
+
+scanner.register res-scanner : include ;
+type.set-scanner RC : res-scanner ;
diff --git a/jam-files/boost-build/tools/stage.jam b/jam-files/boost-build/tools/stage.jam
new file mode 100644
index 000000000..296e7558e
--- /dev/null
+++ b/jam-files/boost-build/tools/stage.jam
@@ -0,0 +1,524 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location.
+
+import targets ;
+import "class" : new ;
+import errors ;
+import type ;
+import generators ;
+import feature ;
+import project ;
+import virtual-target ;
+import path ;
+import types/register ;
+
+
+feature.feature <install-dependencies> : off on : incidental ;
+feature.feature <install-type> : : free incidental ;
+feature.feature <install-source-root> : : free path ;
+feature.feature <so-version> : : free incidental ;
+
+# If 'on', version symlinks for shared libraries will not be created. Affects
+# Unix builds only.
+feature.feature <install-no-version-symlinks> : on : optional incidental ;
+
+
+class install-target-class : basic-target
+{
+ import feature ;
+ import project ;
+ import type ;
+ import errors ;
+ import generators ;
+ import path ;
+ import stage ;
+ import "class" : new ;
+ import property ;
+ import property-set ;
+
+ rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * )
+ {
+ basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
+ $(requirements) : $(default-build) ;
+ }
+
+ # If <location> is not set, sets it based on the project data.
+ #
+ rule update-location ( property-set )
+ {
+ local loc = [ $(property-set).get <location> ] ;
+ if ! $(loc)
+ {
+ loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
+ property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
+ }
+
+ return $(property-set) ;
+ }
+
+ # Takes a target that is installed and a property set which is used when
+ # installing.
+ #
+ rule adjust-properties ( target : build-property-set )
+ {
+ local ps-raw ;
+ local a = [ $(target).action ] ;
+ if $(a)
+ {
+ local ps = [ $(a).properties ] ;
+ ps-raw = [ $(ps).raw ] ;
+
+ # Unless <hardcode-dll-paths>true is in properties, which can happen
+ # only if the user has explicitly requested it, nuke all <dll-path>
+ # properties.
+ if [ $(build-property-set).get <hardcode-dll-paths> ] != true
+ {
+ ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
+ }
+
+ # If any <dll-path> properties were specified for installing, add
+ # them.
+ local l = [ $(build-property-set).get <dll-path> ] ;
+ ps-raw += $(l:G=<dll-path>) ;
+
+ # Also copy <linkflags> feature from current build set, to be used
+ # for relinking.
+ local l = [ $(build-property-set).get <linkflags> ] ;
+ ps-raw += $(l:G=<linkflags>) ;
+
+ # Remove the <tag> feature on original targets.
+ ps-raw = [ property.change $(ps-raw) : <tag> ] ;
+
+ # And <location>. If stage target has another stage target in
+ # sources, then we shall get virtual targets with the <location>
+ # property set.
+ ps-raw = [ property.change $(ps-raw) : <location> ] ;
+ }
+
+ local d = [ $(build-property-set).get <dependency> ] ;
+ ps-raw += $(d:G=<dependency>) ;
+
+ local d = [ $(build-property-set).get <location> ] ;
+ ps-raw += $(d:G=<location>) ;
+
+ local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
+ ps-raw += $(ns:G=<install-no-version-symlinks>) ;
+
+ local d = [ $(build-property-set).get <install-source-root> ] ;
+ # Make the path absolute: we shall use it to compute relative paths and
+ # making the path absolute will help.
+ if $(d)
+ {
+ d = [ path.root $(d) [ path.pwd ] ] ;
+ ps-raw += $(d:G=<install-source-root>) ;
+ }
+
+ if $(ps-raw)
+ {
+ return [ property-set.create $(ps-raw) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ source-targets = [ targets-to-stage $(source-targets) :
+ $(property-set) ] ;
+
+ property-set = [ update-location $(property-set) ] ;
+
+ local ename = [ $(property-set).get <name> ] ;
+
+ if $(ename) && $(source-targets[2])
+ {
+ errors.error "When <name> property is used in 'install', only one"
+ "source is allowed" ;
+ }
+
+ local result ;
+ for local i in $(source-targets)
+ {
+ local staged-targets ;
+
+ local new-properties = [ adjust-properties $(i) :
+ $(property-set) ] ;
+
+ # See if something special should be done when staging this type. It
+ # is indicated by the presence of a special "INSTALLED_" type.
+ local t = [ $(i).type ] ;
+ if $(t) && [ type.registered INSTALLED_$(t) ]
+ {
+ if $(ename)
+ {
+ errors.error "In 'install': <name> property specified with target that requires relinking." ;
+ }
+ else
+ {
+ local targets = [ generators.construct $(self.project)
+ $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
+ staged-targets += $(targets[2-]) ;
+ }
+ }
+ else
+ {
+ staged-targets = [ stage.copy-file $(self.project) $(ename) :
+ $(i) : $(new-properties) ] ;
+ }
+
+ if ! $(staged-targets)
+ {
+ errors.error "Unable to generate staged version of " [ $(source).str ] ;
+ }
+
+ for t in $(staged-targets)
+ {
+ result += [ virtual-target.register $(t) ] ;
+ }
+ }
+
+ return [ property-set.empty ] $(result) ;
+ }
+
+ # Given the list of source targets explicitly passed to 'stage', returns the
+ # list of targets which must be staged.
+ #
+ rule targets-to-stage ( source-targets * : property-set )
+ {
+ local result ;
+
+ # Traverse the dependencies, if needed.
+ if [ $(property-set).get <install-dependencies> ] = "on"
+ {
+ source-targets = [ collect-targets $(source-targets) ] ;
+ }
+
+ # Filter the target types, if needed.
+ local included-types = [ $(property-set).get <install-type> ] ;
+ for local r in $(source-targets)
+ {
+ local ty = [ $(r).type ] ;
+ if $(ty)
+ {
+ # Do not stage searched libs.
+ if $(ty) != SEARCHED_LIB
+ {
+ if $(included-types)
+ {
+ if [ include-type $(ty) : $(included-types) ]
+ {
+ result += $(r) ;
+ }
+ }
+ else
+ {
+ result += $(r) ;
+ }
+ }
+ }
+ else if ! $(included-types)
+ {
+ # Install a typeless target only when there is no explicit list of
+ # allowed types.
+ result += $(r) ;
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # CONSIDER: figure out why we can not use virtual-target.traverse here.
+ #
+ rule collect-targets ( targets * )
+ {
+ # Find subvariants
+ local s ;
+ for local t in $(targets)
+ {
+ s += [ $(t).creating-subvariant ] ;
+ }
+ s = [ sequence.unique $(s) ] ;
+
+ local result = [ new set ] ;
+ $(result).add $(targets) ;
+
+ for local i in $(s)
+ {
+ $(i).all-referenced-targets $(result) ;
+ }
+ local result2 ;
+ for local r in [ $(result).list ]
+ {
+ if $(r:G) != <use>
+ {
+ result2 += $(r:G=) ;
+ }
+ }
+ DELETE_MODULE $(result) ;
+ result = [ sequence.unique $(result2) ] ;
+ }
+
+ # Returns true iff 'type' is subtype of some element of 'types-to-include'.
+ #
+ local rule include-type ( type : types-to-include * )
+ {
+ local found ;
+ while $(types-to-include) && ! $(found)
+ {
+ if [ type.is-subtype $(type) $(types-to-include[1]) ]
+ {
+ found = true ;
+ }
+ types-to-include = $(types-to-include[2-]) ;
+ }
+
+ return $(found) ;
+ }
+}
+
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
+#
+rule copy-file ( project name ? : source : properties )
+{
+ name ?= [ $(source).name ] ;
+ local relative ;
+
+ local new-a = [ new non-scanning-action $(source) : common.copy :
+ $(properties) ] ;
+ local source-root = [ $(properties).get <install-source-root> ] ;
+ if $(source-root)
+ {
+ # Get the real path of the target. We probably need to strip relative
+ # path from the target name at construction.
+ local path = [ $(source).path ] ;
+ path = [ path.root $(name:D) $(path) ] ;
+ # Make the path absolute. Otherwise, it would be hard to compute the
+ # relative path. The 'source-root' is already absolute, see the
+ # 'adjust-properties' method above.
+ path = [ path.root $(path) [ path.pwd ] ] ;
+
+ relative = [ path.relative-to $(source-root) $(path) ] ;
+ }
+
+ # Note: Using $(name:D=$(relative)) might be faster here, but then we would
+ # need to explicitly check that relative is not ".", otherwise we might get
+ # paths like '<prefix>/boost/.', try to create it and mkdir would obviously
+ # fail.
+ name = [ path.join $(relative) $(name:D=) ] ;
+
+ return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
+ $(new-a) ] ;
+}
+
+
+rule symlink ( name : project : source : properties )
+{
+ local a = [ new action $(source) : symlink.ln : $(properties) ] ;
+ return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
+ $(a) ] ;
+}
+
+
+rule relink-file ( project : source : property-set )
+{
+ local action = [ $(source).action ] ;
+ local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
+ "" : $(property-set) ] ;
+ return [ $(cloned-action).targets ] ;
+}
+
+
+# Declare installed version of the EXE type. Generator for this type will cause
+# relinking to the new location.
+type.register INSTALLED_EXE : : EXE ;
+
+
+class installed-exe-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-exe : EXE : INSTALLED_EXE ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ local need-relink ;
+
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ }
+ else
+ {
+ # Check whether the dll-path properties change during
+ # installation. If they do not, we can copy instead of relinking.
+ local a = [ $(source).action ] ;
+ local p = [ $(a).properties ] ;
+ local original = [ $(p).get <dll-path> ] ;
+ local current = [ $(property-set).get <dll-path> ] ;
+
+ if $(current) != $(original)
+ {
+ need-relink = true ;
+ }
+ }
+
+
+ if $(need-relink)
+ {
+ return [ stage.relink-file $(project)
+ : $(source) : $(property-set) ] ;
+ }
+ else
+ {
+ return [ stage.copy-file $(project)
+ : $(source) : $(property-set) ] ;
+ }
+ }
+}
+
+
+generators.register [ new installed-exe-generator ] ;
+
+
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
+type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
+
+
+class installed-shared-lib-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-shared-lib : SHARED_LIB
+ : INSTALLED_SHARED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ local copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ return [ virtual-target.register $(copied) ] ;
+ }
+ else
+ {
+ local a = [ $(source).action ] ;
+ local copied ;
+ if ! $(a)
+ {
+ # Non-derived file, just copy.
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ local cp = [ $(a).properties ] ;
+ local current-dll-path = [ $(cp).get <dll-path> ] ;
+ local new-dll-path = [ $(property-set).get <dll-path> ] ;
+
+ if $(current-dll-path) != $(new-dll-path)
+ {
+ # Rpath changed, need to relink.
+ copied = [ stage.relink-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ }
+
+ copied = [ virtual-target.register $(copied) ] ;
+
+ local result = $(copied) ;
+ # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
+ # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
+ # symbolic links.
+ local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
+ : [ $(copied).name ] ] ;
+ if $(m)
+ {
+ # A symlink without any version is used to make
+ # -lsome_library work.
+ result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
+ $(property-set) ] ;
+
+ # Symlinks to libfoo.N and libfoo.N.M are used so that the
+ # library can be found at runtime, if libfoo.N.M.X has a soname of
+ # libfoo.N. That happens when the library makes some binary
+ # compatibility guarantees. If not, it is possible to skip those
+ # symlinks.
+ local suppress =
+ [ $(property-set).get <install-no-version-symlinks> ] ;
+
+ if $(suppress) != "on"
+ {
+ result += [ stage.symlink $(m[1]).$(m[2]) : $(project)
+ : $(copied) : $(property-set) ] ;
+ result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project)
+ : $(copied) : $(property-set) ] ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+}
+
+generators.register [ new installed-shared-lib-generator ] ;
+
+
+# Main target rule for 'install'.
+#
+rule install ( name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ # Unless the user has explicitly asked us to hardcode dll paths, add
+ # <hardcode-dll-paths>false in requirements, to override default value.
+ if ! <hardcode-dll-paths>true in $(requirements)
+ {
+ requirements += <hardcode-dll-paths>false ;
+ }
+
+ if <tag> in $(requirements:G)
+ {
+ errors.user-error
+ "The <tag> property is not allowed for the 'install' rule" ;
+ }
+
+ targets.main-target-alternative
+ [ new install-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
+
+
+IMPORT $(__name__) : install : : install ;
+IMPORT $(__name__) : install : : stage ;
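+
+
+# Example (editor's sketch, not part of the original module; target and
+# directory names are illustrative). A Jamfile could stage an executable
+# together with the shared libraries it depends on like this:
+#
+#   install dist
+#       : hello                        # targets to install
+#       : <location>dist               # where to put them
+#         <install-dependencies>on     # also stage dependencies...
+#         <install-type>EXE            # ...but only executables
+#         <install-type>SHARED_LIB     # and shared libraries
+#       ;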
diff --git a/jam-files/boost-build/tools/stlport.jam b/jam-files/boost-build/tools/stlport.jam
new file mode 100644
index 000000000..62eebda5f
--- /dev/null
+++ b/jam-files/boost-build/tools/stlport.jam
@@ -0,0 +1,303 @@
+# Copyright Gennadiy Rozental
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# STLport is made usable by means of the 'stdlib' feature. When
+# stdlib=stlport is specified, the default version of STLport will be used,
+# while stdlib=stlport-4.5 will use that specific version.
+# The subfeature value 'hostios' means to use the host compiler's iostreams.
+#
+# The specific STLport library is selected by features:
+# The <runtime-link> feature selects between the static and the shared library.
+# <runtime-debugging>on selects STLport with debug symbols
+# and STL debugging.
+# There is no way to use STLport with debug symbols but without
+# STL debugging.
+
+# TODO: must implement selection of different STLport installations based
+# on the toolset used.
+# Also, finish the various flags:
+#
+# This is copied from V1 toolset, "+" means "implemented"
+#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
+# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
+
+
+import feature : feature subfeature ;
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+import common ;
+import type ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project stlport ;
+
+# The problem: how do we request the use of the host compiler's iostreams?
+#
+# Solution 1: A global 'stlport-iostream' feature.
+# That is ugly; a subfeature makes more sense for an stlport-specific thing.
+# Solution 2: Use a subfeature with two values, one of which ("use STLport
+# iostreams") is the default.
+# The problem is that such a subfeature would appear in target paths, and
+# that is ugly.
+# Solution 3: Use an optional subfeature with only one value (what we do below).
+
+feature.extend stdlib : stlport ;
+feature.compose <stdlib>stlport : <library>/stlport//stlport ;
+
+# STLport iostreams or native iostreams
+subfeature stdlib stlport : iostream : hostios : optional propagated ;
+
+# STLport extensions
+subfeature stdlib stlport : extensions : noext : optional propagated ;
+
+# STLport anachronisms -- NOT YET SUPPORTED
+# subfeature stdlib stlport : anachronisms : on off ;
+
+# STLport debug allocation -- NOT YET SUPPORTED
+#subfeature stdlib stlport : debug-alloc : off on ;
+
+# Declare a special target class to handle the creation of search-lib-target
+# instances for STLport. We need a special class because otherwise we would
+# have to:
+# - declare prebuilt targets for all possible toolsets, and by the time 'init'
+#   is called we do not even know the list of registered toolsets;
+# - produce nothing at all when host iostreams are used, which would be hard
+#   or impossible to achieve using prebuilt targets.
+
+class stlport-target-class : basic-target
+{
+ import feature project type errors generators ;
+ import set : difference ;
+
+ rule __init__ ( project : headers ? : libraries * : version ? )
+ {
+ basic-target.__init__ stlport : $(project) ;
+ self.headers = $(headers) ;
+ self.libraries = $(libraries) ;
+ self.version = $(version) ;
+ self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
+
+ local requirements ;
+ requirements += <stdlib-stlport:version>$(self.version) ;
+ self.requirements = [ property-set.create $(requirements) ] ;
+ }
+
+ rule generate ( property-set )
+ {
+ # Since this target is built with <stdlib>stlport, it will also
+ # have <library>/stlport//stlport in requirements, which will
+ # cause a loop in main target references. Remove that property
+ # manually.
+
+ property-set = [ property-set.create
+ [ difference
+ [ $(property-set).raw ] :
+ <library>/stlport//stlport
+ <stdlib>stlport
+ ]
+ ] ;
+ return [ basic-target.generate $(property-set) ] ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ # Deduce the name of stlport library, based on toolset and
+ # debug setting.
+ local raw = [ $(property-set).raw ] ;
+ local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
+ local toolset = [ feature.get-values <toolset> : $(raw) ] ;
+
+ if $(self.version.5)
+ {
+ # Version 5.x
+
+ # STLport host IO streams are no longer supported, so we always
+ # need libraries.
+
+ # name: stlport(stl)?[dg]?(_static)?.M.R
+ local name = stlport ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name += stl ;
+ switch $(toolset)
+ {
+ case gcc* : name += g ;
+ case darwin* : name += g ;
+ case * : name += d ;
+ }
+ }
+
+ if [ feature.get-values <runtime-link> : $(raw) ] = "static"
+ {
+ name += _static ;
+ }
+
+ # Starting with version 5.2.0, the STLport static libraries no longer
+ # include a version number in their name.
+ local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ;
+ if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static"
+ {
+ name += .$(self.version.5) ;
+ }
+
+ name = $(name:J=) ;
+
+ if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
+ {
+ #~ Allow explicitly asking to install the STLport lib by
+ #~ referring to it directly: /stlport//stlport/<install-dependencies>on
+ #~ This allows for install packaging of all the libs one might need for
+ #~ a standalone distribution.
+ import path : make : path-make ;
+ local runtime-link
+ = [ feature.get-values <runtime-link> : $(raw) ] ;
+ local lib-file.props
+ = [ property-set.create $(raw) <link>$(runtime-link) ] ;
+ local lib-file.prefix
+ = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
+ local lib-file.suffix
+ = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
+ lib-file.prefix
+ ?= "" "lib" ;
+ lib-file.suffix
+ ?= "" ;
+ local lib-file
+ = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
+ $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
+ lib-file
+ = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ;
+ lib-file
+ = [ $(lib-file).generate "" ] ;
+ local lib-file.requirements
+ = [ targets.main-target-requirements
+ [ $(lib-file.props).raw ] <file>$(lib-file[-1])
+ : $(self.project) ] ;
+ return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ;
+ }
+ else
+ {
+ #~ Otherwise, it's just a regular usage of the library.
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ }
+ else if ! $(hostios) && $(toolset) != msvc
+ {
+ # We do not need libraries if host iostreams are used. For
+ # msvc, automatic library selection will be used.
+
+ # name: stlport_<toolset>(_stldebug)?
+ local name = stlport ;
+ name = $(name)_$(toolset) ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name = $(name)_stldebug ;
+ }
+
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local usage-requirements =
+ <include>$(self.headers)
+ <dll-path>$(self.libraries)
+ <library-path>$(self.libraries)
+ ;
+
+ local rproperties = [ $(subvariant).build-properties ] ;
+ # CONSIDER: should this "if" sequence be replaced with
+ # some use of 'property-map' class?
+ if [ $(rproperties).get <runtime-debugging> ] = "on"
+ {
+ usage-requirements +=
+ <define>_STLP_DEBUG=1
+ <define>_STLP_DEBUG_UNINITIALIZED=1 ;
+ }
+ if [ $(rproperties).get <runtime-link> ] = "shared"
+ {
+ usage-requirements +=
+ <define>_STLP_USE_DYNAMIC_LIB=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
+ {
+ usage-requirements +=
+ <define>_STLP_NO_EXTENSIONS=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
+ {
+ usage-requirements +=
+ <define>_STLP_NO_OWN_IOSTREAMS=1
+ <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+ }
+ if $(self.version.5)
+ {
+ # Version 5.x
+ if [ $(rproperties).get <threading> ] = "single"
+ {
+ # Since STLport 5 does not normally support single-threaded
+ # builds, we force STLport 5 into multi-threaded mode. The
+ # single-threaded code provided by other libs thus ends up
+ # linking against a multi-threaded STLport.
+ usage-requirements +=
+ <define>_STLP_THREADS=1 ;
+ }
+ }
+
+ return [ property-set.create $(usage-requirements) ] ;
+ }
+}
+
+rule stlport-target ( headers ? : libraries * : version ? )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new stlport-target-class $(project) : $(headers) : $(libraries)
+ : $(version)
+ ] ;
+}
+
+local .version-subfeature-defined ;
+
+# Initialize stlport support.
+rule init (
+ version ? :
+ headers : # Location of header files
+ libraries * # Location of libraries, lib and bin subdirs of STLport.
+ )
+{
+ # FIXME: need to use common.check-init-parameters here.
+ # At the moment, that rule always tries to define subfeature
+ # of the 'toolset' feature, while we need to define subfeature
+ # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
+ if $(version)
+ {
+ if ! $(.version-subfeature-defined)
+ {
+ feature.subfeature stdlib stlport : version : : propagated ;
+ .version-subfeature-defined = true ;
+ }
+ feature.extend-subfeature stdlib stlport : version : $(version) ;
+ }
+
+ # Declare the main target for this STLPort version.
+ stlport-target $(headers) : $(libraries) : $(version) ;
+}
+
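+# Example (editor's sketch, not part of the original module; the version and
+# paths are illustrative). A user-config.jam entry and a Jamfile target that
+# requests this STLport via the stdlib feature might look like:
+#
+#   using stlport : 5.2.1 : /opt/STLport/stlport : /opt/STLport/lib ;
+#
+#   exe hello : hello.cpp : <stdlib>stlport-5.2.1 ;
+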
diff --git a/jam-files/boost-build/tools/sun.jam b/jam-files/boost-build/tools/sun.jam
new file mode 100644
index 000000000..0ca927d3e
--- /dev/null
+++ b/jam-files/boost-build/tools/sun.jam
@@ -0,0 +1,142 @@
+# Copyright (C) Christopher Currie 2003. Permission to copy, use,
+# modify, sell and distribute this software is granted provided this
+# copyright notice appears in all copies. This software is provided
+# "as is" without express or implied warranty, and with no claim as
+# to its suitability for any purpose.
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import type ;
+import common ;
+
+feature.extend toolset : sun ;
+toolset.inherit sun : unix ;
+generators.override sun.prebuilt : builtin.lib-generator ;
+generators.override sun.prebuilt : builtin.prebuilt ;
+generators.override sun.searched-lib-generator : searched-lib-generator ;
+
+feature.extend stdlib : sun-stlport ;
+feature.compose <stdlib>sun-stlport
+ : <cxxflags>-library=stlport4 <linkflags>-library=stlport4
+ ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters sun : version $(version) ] ;
+
+ command = [ common.get-invocation-command sun : CC
+ : $(command) : "/opt/SUNWspro/bin" ] ;
+
+ # Even if the real compiler is not found, put CC on the command line so
+ # that the user sees the command line that would have been executed.
+ command ?= CC ;
+
+ common.handle-options sun : $(condition) : $(command) : $(options) ;
+
+ command_c = $(command[1--2]) $(command[-1]:B=cc) ;
+
+ toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ;
+}
+
+# Declare generators
+generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ;
+generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ;
+
+# Declare flags and actions for compilation
+flags sun.compile OPTIONS <debug-symbols>on : -g ;
+flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags sun.compile OPTIONS <optimization>speed : -xO4 ;
+flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ;
+flags sun.compile OPTIONS <threading>multi : -mt ;
+flags sun.compile OPTIONS <warnings>off : -erroff ;
+flags sun.compile OPTIONS <warnings>on : -erroff=%none ;
+flags sun.compile OPTIONS <warnings>all : -erroff=%none ;
+flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ;
+
+flags sun.compile.c++ OPTIONS <inlining>off : +d ;
+
+# The -m32 and -m64 options are supported starting
+# with Sun Studio 12. On earlier compilers, the
+# 'address-model' feature is not supported and should not
+# be used. Instead, use -xarch=generic64 command line
+# option.
+# See http://svn.boost.org/trac/boost/ticket/1186
+# for details.
+flags sun OPTIONS <address-model>32 : -m32 ;
+flags sun OPTIONS <address-model>64 : -m64 ;
+# On sparc, there's a difference between -Kpic
+# and -KPIC. The first is slightly more efficient,
+# but has limits on the size of the GOT table.
+# For minimal fuss on the user's side, we use -KPIC here.
+# See http://svn.boost.org/trac/boost/ticket/1186#comment:6
+# for a detailed explanation.
+flags sun OPTIONS <link>shared : -KPIC ;
+
+flags sun.compile OPTIONS <cflags> ;
+flags sun.compile.c++ OPTIONS <cxxflags> ;
+flags sun.compile DEFINES <define> ;
+flags sun.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags sun.link OPTIONS <debug-symbols>on : -g ;
+# Strip the binary when no debugging is needed
+flags sun.link OPTIONS <debug-symbols>off : -s ;
+flags sun.link OPTIONS <profiling>on : -xprofile=tcov ;
+flags sun.link OPTIONS <threading>multi : -mt ;
+flags sun.link OPTIONS <linkflags> ;
+flags sun.link LINKPATH <library-path> ;
+flags sun.link FINDLIBS-ST <find-static-library> ;
+flags sun.link FINDLIBS-SA <find-shared-library> ;
+flags sun.link LIBRARIES <library-file> ;
+flags sun.link LINK-RUNTIME <runtime-link>static : static ;
+flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags sun.link RPATH <dll-path> ;
+# On gcc, there are separate options for the dll path at runtime and at
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it's a bad idea.
+flags sun.link RPATH <xdll-path> ;
+
+# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.)
+flags sun.link FINDLIBS-SA : rt ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)"
+}
+
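+# Example (editor's sketch, not part of the original module; the path is
+# illustrative). A user-config.jam entry selecting this toolset might look
+# like:
+#
+#   using sun : 12 : /opt/SUNWspro/bin/CC ;
+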
diff --git a/jam-files/boost-build/tools/symlink.jam b/jam-files/boost-build/tools/symlink.jam
new file mode 100644
index 000000000..b33e8260c
--- /dev/null
+++ b/jam-files/boost-build/tools/symlink.jam
@@ -0,0 +1,140 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the "symlink" special target. 'symlink' targets make symbolic links
+# to the sources.
+
+import targets modules path class os feature project property-set ;
+
+.count = 0 ;
+
+feature.feature symlink-location : project-relative build-relative : incidental ;
+
+# The class representing "symlink" targets.
+#
+class symlink-targets : basic-target
+{
+ import numbers modules class property project path ;
+
+ rule __init__ (
+ project
+ : targets *
+ : sources *
+ )
+ {
+ # Generate a fake name for now. Need unnamed targets eventually.
+ local c = [ modules.peek symlink : .count ] ;
+ modules.poke symlink : .count : [ numbers.increment $(c) ] ;
+ local fake-name = symlink#$(c) ;
+
+ basic-target.__init__ $(fake-name) : $(project) : $(sources) ;
+
+ # Remember the targets to map the sources onto. Pad or truncate
+ # to fit the sources given.
+ self.targets = ;
+ for local source in $(sources)
+ {
+ if $(targets)
+ {
+ self.targets += $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ self.targets += $(source) ;
+ }
+ }
+
+ # The virtual targets corresponding to the given targets.
+ self.virtual-targets = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local i = 1 ;
+ for local t in $(source-targets)
+ {
+ local s = $(self.targets[$(i)]) ;
+ local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ;
+ local vt = [ class.new file-target $(s:D=)
+ : [ $(t).type ] : $(self.project) : $(a) ] ;
+
+ # Place the symlink in the directory relative to the project
+ # location, instead of placing it in the build directory.
+ if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative
+ {
+ $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ;
+ }
+
+ self.virtual-targets += $(vt) ;
+ i = [ numbers.increment $(i) ] ;
+ }
+ return [ property-set.empty ] $(self.virtual-targets) ;
+ }
+}
+
+# Creates symbolic links from a set of targets to a set of sources. The
+# targets and sources map one to one. The symlinks generated are limited
+# to the ones given as the sources. That is, the target list is either
+# padded or trimmed to match the sources; the padding is done with the
+# name of the corresponding source. For example::
+#
+# symlink : one two ;
+#
+# Is equal to::
+#
+# symlink one two : one two ;
+#
+# Names for symlink are relative to the project location. They cannot
+# include ".." path components.
+rule symlink (
+ targets *
+ : sources *
+ )
+{
+ local project = [ project.current ] ;
+
+ return [ targets.main-target-alternative
+ [ class.new symlink-targets $(project) : $(targets) :
+ # Note: inline targets are intentionally not supported for symlink,
+ # since it is used to link existing non-local targets.
+ $(sources) ] ] ;
+}
+
+rule ln
+{
+ local os ;
+ if [ modules.peek : UNIX ] { os = UNIX ; }
+ else { os ?= [ os.name ] ; }
+ # Remember the path to make the link relative to where the symlink is located.
+ local path-to-source = [ path.relative-to
+ [ path.make [ on $(<) return $(LOCATE) ] ]
+ [ path.make [ on $(>) return $(LOCATE) ] ] ] ;
+ if $(path-to-source) = .
+ {
+ PATH_TO_SOURCE on $(<) = "" ;
+ }
+ else
+ {
+ PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ;
+ }
+ ln-$(os) $(<) : $(>) ;
+}
+
+actions ln-UNIX
+{
+ ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'
+}
+
+# there is a way to do this; we fall back to a copy for now
+actions ln-NT
+{
+ echo "NT symlinks not supported yet, making copy"
+ del /f /q "$(<)" 2>nul >nul
+ copy "$(>)" "$(<)" $(NULL_OUT)
+}
+
+IMPORT $(__name__) : symlink : : symlink ;
diff --git a/jam-files/boost-build/tools/testing-aux.jam b/jam-files/boost-build/tools/testing-aux.jam
new file mode 100644
index 000000000..525dafd0c
--- /dev/null
+++ b/jam-files/boost-build/tools/testing-aux.jam
@@ -0,0 +1,210 @@
+# This module is imported by testing.py. The definitions here are
+# too tricky to do in Python.
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+ **passed** $(target) : $(dependency) ;
+}
+IMPORT testing : expect-success : : testing.expect-success ;
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+IMPORT testing : expect-failure : : testing.expect-failure ;
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+# Runs the executable 'source' and stores its stdout in the file 'target'.
+# Unless the --preserve-test-targets command line option has been specified,
+# removes the executable. The 'targets-to-remove' parameter controls what
+# should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+ # The INCLUDES kills a warning about an independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into the dependency graph, so we must tell jam
+ # it is OK if it cannot find the target or an updating rule.
+ NOCARE $(target:S=.output) ;
+
+ # This has a two-fold effect. First, it adds the input files to the
+ # dependency graph, preventing a warning. Second, it causes the input
+ # files to be bound before the target is created. Therefore, they are
+ # bound using their own SEARCH setting and not the LOCATE setting of
+ # $(target), as they otherwise would be (due to a jam bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
+ {
+ TEMPORARY $(targets-to-remove) ;
+ # Set a second action on the target that will be executed after the
+ # capture-output action. The 'RmTemps' action has the 'ignore' modifier,
+ # so it is always considered to have succeeded. This is needed for the
+ # 'run-fail' tests: for those the target is marked with FAIL_EXPECTED,
+ # and without 'ignore' a successful execution would be negated and
+ # reported as a failure. With 'ignore' we do not detect the case where
+ # removing the files fails, but that is unlikely to happen.
+ RmTemps $(target) : $(targets-to-remove) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .STATUS_0 = "%status% EQU 0 (" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ .VERBOSE = "%verbose% EQU 1 (" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .STATUS_0 = "test $status -eq 0 ; then" ;
+ .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ .VERBOSE = "test $verbose -eq 1 ; then" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+IMPORT testing : capture-output : : testing.capture-output ;
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
+}
+
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+}
+
+# Calling this rule requests that Boost.Build time how long it takes to build
+# the 'source' target and display the results both on the standard output and
+# in the 'target' file.
+#
+rule time ( target : source : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
+
+ # Make sure that the source is rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(source) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME)" seconds" > "$(<)"
+ echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
+}
diff --git a/jam-files/boost-build/tools/testing.jam b/jam-files/boost-build/tools/testing.jam
new file mode 100644
index 000000000..c42075b78
--- /dev/null
+++ b/jam-files/boost-build/tools/testing.jam
@@ -0,0 +1,581 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements a regression testing framework. It declares a number
+# of main target rules which perform some action and, if the results are OK,
+# create an output file.
+#
+# The exact list of rules is:
+# 'compile' -- creates a .test file if compilation of the sources was
+# successful.
+# 'compile-fail' -- creates a .test file if compilation of the sources failed.
+# 'run' -- creates a .test file if running the executable produced from
+# the sources was successful. Also leaves behind a .output file
+# with the output from the program run.
+# 'run-fail' -- same as above, but the .test file is created if running fails.
+#
+# In all cases, the presence of the .test file is an indication that the test
+# passed. For more convenient reporting, you might want to use the C++ Boost
+# regression testing utilities (see http://www.boost.org/more/regression.html).
+#
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
+
+# Things to do:
+# - Teach compiler_status to handle Jamfile.v2.
+# Notes:
+# - <no-warn> is not implemented, since it is Como-specific, and it is not
+# clear how to implement it
+# - std::locale-support is not implemented (it is used in one test).
+
+
+import alias ;
+import "class" ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import type ;
+import virtual-target ;
+
+
+rule init ( )
+{
+}
+
+
+# Feature controlling the command used to launch test programs.
+feature.feature testing.launcher : : free optional ;
+
+feature.feature test-info : : free incidental ;
+feature.feature testing.arg : : free incidental ;
+feature.feature testing.input-file : : free dependency ;
+
+feature.feature preserve-test-targets : on off : incidental propagated ;
+
+# Register target types.
+type.register TEST : test ;
+type.register COMPILE : : TEST ;
+type.register COMPILE_FAIL : : TEST ;
+type.register RUN_OUTPUT : run ;
+type.register RUN : : TEST ;
+type.register RUN_FAIL : : TEST ;
+type.register LINK_FAIL : : TEST ;
+type.register LINK : : TEST ;
+type.register UNIT_TEST : passed : TEST ;
+
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: a
+# default name for the main target, so we write our own versions.
+
+# Helper rule. Creates a test target, using the basename of the first source
+# if no target name is explicitly passed. Remembers the created target in a
+# global variable.
+#
+rule make-test ( target-type : sources + : requirements * : target-name ? )
+{
+ target-name ?= $(sources[1]:D=:S=) ;
+
+ # Having periods (".") in the target name is problematic because the typed
+ # generator will strip the suffix and use the bare name for the file
+ # targets. Even though the location-prefix averts problems most of the
+ # time, it does not prevent ambiguity issues when referring to the test
+ # targets, for example when using the XML log output. So we rename the
+ # target to remove the periods and provide an alias for users.
+ local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
+
+ local project = [ project.current ] ;
+ # The <location-prefix> forces the build system to generate paths in the
+ # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
+ # post-processing tools to work.
+ local t = [ targets.create-typed-target [ type.type-from-rule-name
+ $(target-type) ] : $(project) : $(real-name) : $(sources) :
+ $(requirements) <location-prefix>$(real-name).test ] ;
+
+ # The alias to the real target, per period replacement above.
+ if $(real-name) != $(target-name)
+ {
+ alias $(target-name) : $(t) ;
+ }
+
+ # Remember the test (for --dump-tests). A better way would be to collect
+ # all tests for a given project, but this has some technical problems:
+ # e.g. we cannot call this dump from a Jamfile since projects referred to
+ # by 'build-project' are not available until the whole Jamfile has been
+ # loaded.
+ .all-tests += $(t) ;
+ return $(t) ;
+}
+
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp
+# file and a library target works.
+#
+rule compile ( sources + : requirements * : target-name ? )
+{
+ return [ make-test compile : $(sources) : $(requirements) : $(target-name) ]
+ ;
+}
+
+
+rule compile-fail ( sources + : requirements * : target-name ? )
+{
+ return [ make-test compile-fail : $(sources) : $(requirements) :
+ $(target-name) ] ;
+}
+
+
+rule link ( sources + : requirements * : target-name ? )
+{
+ return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule link-fail ( sources + : requirements * : target-name ? )
+{
+ return [ make-test link-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+rule handle-input-files ( input-files * )
+{
+ if $(input-files[2])
+ {
+ # Check that sorting made when creating property-set instance will not
+ # change the ordering.
+ if [ sequence.insertion-sort $(input-files) ] != $(input-files)
+ {
+ errors.user-error "Names of input files must be sorted alphabetically"
+ : "due to internal limitations" ;
+ }
+ }
+ return <testing.input-file>$(input-files) ;
+}
+
+
+rule run ( sources + : args * : input-files * : requirements * : target-name ? :
+ default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule run-fail ( sources + : args * : input-files * : requirements * :
+ target-name ? : default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
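+
+
+# Example (editor's sketch, not part of the original module; file names are
+# illustrative). A test Jamfile could declare tests with the rules above like
+# this:
+#
+#   import testing ;
+#
+#   compile  syntax_only.cpp ;        # passes if the source compiles
+#   run      smoke_test.cpp ;         # passes if the program exits with 0
+#   run-fail must_abort.cpp ;         # passes if the program exits non-zero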
+
+
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
+IMPORT : alias : : test-suite ;
+
+
+# For all recorded test targets (typed targets with a type derived from
+# 'TEST'), produce some interesting information.
+#
+rule dump-tests
+{
+ for local t in $(.all-tests)
+ {
+ dump-test $(t) ;
+ }
+}
+
+
+# Given a project location in normalized form (slashes are forward), compute the
+# name of the Boost library.
+#
+local rule get-library-name ( path )
+{
+ # Path is in normalized form, so all slashes are forward.
+ local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
+ local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
+ local match3 = [ MATCH (/status$) : $(path) ] ;
+
+ if $(match1) { return $(match1[2]) ; }
+ else if $(match2) { return $(match2[2]) ; }
+ else if $(match3) { return "" ; }
+ else if --dump-tests in [ modules.peek : ARGV ]
+ {
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
+ return $(path) ;
+ }
+}
+
+
+# Was an XML dump requested?
+.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
+
+
+# Takes a target (an instance of 'basic-target') and prints
+# - its type
+# - its name
+# - comments specified via the <test-info> property
+# - the relative locations of all sources from the project root.
+#
+rule dump-test ( target )
+{
+ local type = [ $(target).type ] ;
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+
+ local project-root = [ $(project).get project-root ] ;
+ local library = [ get-library-name [ path.root [ $(project).get location ]
+ [ path.pwd ] ] ] ;
+ if $(library)
+ {
+ name = $(library)/$(name) ;
+ }
+
+ local sources = [ $(target).sources ] ;
+ local source-files ;
+ for local s in $(sources)
+ {
+ if [ class.is-a $(s) : file-reference ]
+ {
+ local location = [ path.root [ path.root [ $(s).name ]
+ [ $(s).location ] ] [ path.pwd ] ] ;
+
+ source-files += [ path.relative-to [ path.root $(project-root)
+ [ path.pwd ] ] $(location) ] ;
+ }
+ }
+
+ local target-name = [ $(project).get location ] // [ $(target).name ] .test
+ ;
+ target-name = $(target-name:J=) ;
+
+ local r = [ $(target).requirements ] ;
+ # Extract values of the <test-info> feature.
+ local test-info = [ $(r).get <test-info> ] ;
+
+ # If the user requested XML output on the command-line, add the test info to
+ # that XML file rather than dumping it to stdout.
+ if $(.out-xml)
+ {
+ local nl = "
+" ;
+ .contents on $(.out-xml) +=
+ "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
+ "$(nl) <target><![CDATA[$(target-name)]]></target>"
+ "$(nl) <info><![CDATA[$(test-info)]]></info>"
+ "$(nl) <source><![CDATA[$(source-files)]]></source>"
+ "$(nl) </test>"
+ ;
+ }
+ else
+ {
+ # Format them into a single string of quoted strings.
+ test-info = \"$(test-info:J=\"\ \")\" ;
+
+ ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":"
+ \"$(source-files)\" ;
+ }
+}
+
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register-standard testing.expect-success : OBJ : COMPILE ;
+generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
+generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
+generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
+generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
+generators.register-standard testing.expect-success : EXE : LINK ;
+
+# Generator which runs an EXE and captures output.
+generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
+
+# Generator which creates a target if sources run successfully. Differs from RUN
+# in that run output is not captured. The reason why it exists is that the 'run'
+# rule is much better for automated testing, but is not user-friendly (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
+generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
+
+
+# The action rules called by generators.
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+ **passed** $(target) : $(sources) ;
+}
+
+
+# Causes the 'target' to exist after bjam invocation if and only if all of the
+# dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ # Dump all the tests, if needed. We do it here, since the dump should
+ # happen only after all Jamfiles have been read, and there is currently no
+ # place defined for that (but there should be).
+ if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
+ {
+ .dumped-tests = true ;
+ dump-tests ;
+ }
+
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+
+rule run-path-setup ( target : source : properties * )
+{
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via
+ # xdll-path property) and add whatever explicit dll-path the user has specified.
+ # The resulting paths are added to the environment on each test invocation.
+ local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
+ dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
+ dll-paths += [ on $(source) return $(RUN_PATH) ] ;
+ dll-paths = [ sequence.unique $(dll-paths) ] ;
+ if $(dll-paths)
+ {
+ dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
+ PATH_SETUP on $(target) = [ common.prepend-path-variable-command
+ [ os.shared-library-path-variable ] : $(dll-paths) ] ;
+ }
+}
+
+
+local argv = [ modules.peek : ARGV ] ;
+
+toolset.flags testing.capture-output ARGS <testing.arg> ;
+toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
+toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
+
+
+# Runs executable 'sources' and stores stdout in file 'target'. Unless
+# --preserve-test-targets command line option has been specified, removes the
+# executable. The 'targets-to-remove' parameter controls what should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+ # The INCLUDES kills a warning about an independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into the dependency graph, so we must tell jam it
+ # is OK if it cannot find the target or an updating rule.
+ NOCARE $(target:S=.output) ;
+
+ # This has a two-fold effect. First, it adds the input files to the
+ # dependency graph, preventing a warning. Second, it causes the input files
+ # to be bound before the target is created. Therefore, they are bound using
+ # the SEARCH setting on them and not the LOCATE setting of $(target), as
+ # would otherwise be the case (due to a jam bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ run-path-setup $(target) : $(source) : $(properties) ;
+
+ if [ feature.get-values preserve-test-targets : $(properties) ] = off
+ {
+ TEMPORARY $(targets-to-remove) ;
+ # Set a second action on the target that will be executed after the
+ # capture-output action. The 'RmTemps' rule has the 'ignore' modifier so
+ # it is always considered successful. This is needed for the 'run-fail'
+ # test: there the target is marked with FAIL_EXPECTED, and without
+ # 'ignore' a successful execution would be negated and reported as a
+ # failure. With 'ignore' we do not detect the case where removing the
+ # files fails, but that is not likely to happen.
+ RmTemps $(target) : $(targets-to-remove) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .STATUS_0 = "%status% EQU 0 (" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ .VERBOSE = "%verbose% EQU 1 (" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .STATUS_0 = "test $status -eq 0 ; then" ;
+ .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ .VERBOSE = "test $verbose -eq 1 ; then" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
+toolset.flags testing.unit-test ARGS <testing.arg> ;
+
+
+rule unit-test ( target : source : properties * )
+{
+ run-path-setup $(target) : $(source) : $(properties) ;
+}
+
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
+}
+
+
+IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
+ : : compile compile-fail run run-fail link link-fail ;
+
+
+type.register TIME : time ;
+generators.register-standard testing.time : : TIME ;
+
+
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+}
+
+
+IMPORT testing : record-time : : testing.record-time ;
+
+
+# Calling this rule requests that Boost Build time how long it takes to build the
+# 'source' target and display the results both on the standard output and in the
+# 'target' file.
+#
+rule time ( target : source : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
+
+ # Make sure that the source is rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(source) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME)" seconds" > "$(<)"
+ echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
+}
diff --git a/jam-files/boost-build/tools/types/asm.jam b/jam-files/boost-build/tools/types/asm.jam
new file mode 100644
index 000000000..a340db36a
--- /dev/null
+++ b/jam-files/boost-build/tools/types/asm.jam
@@ -0,0 +1,4 @@
+# Copyright Craig Rodrigues 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type ASM : s S asm ;
diff --git a/jam-files/boost-build/tools/types/cpp.jam b/jam-files/boost-build/tools/types/cpp.jam
new file mode 100644
index 000000000..3159cdd77
--- /dev/null
+++ b/jam-files/boost-build/tools/types/cpp.jam
@@ -0,0 +1,86 @@
+# Copyright David Abrahams 2004.
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+import type ;
+import scanner ;
+
+class c-scanner : scanner
+{
+ import path ;
+ import regex ;
+ import scanner ;
+ import sequence ;
+ import virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ for local i in $(includes)
+ {
+ self.includes += [ sequence.transform path.native
+ : [ regex.split $(i:G=) "&&" ] ] ;
+ }
+ }
+
+ rule pattern ( )
+ {
+ return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
+ angle = [ sequence.transform path.native : $(angle) ] ;
+ local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
+ quoted = [ sequence.transform path.native : $(quoted) ] ;
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach the binding of the including file to the included targets. When a
+ # target is created directly from a virtual target this extra information
+ # is unnecessary. But in other cases it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on the including file (we cannot get a literal "." in the include
+ # path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) ;
+
+ INCLUDES $(target) : $(all) ;
+ NOCARE $(all) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
+
+ # Just propagate the current scanner to the includes, in the hope that the
+ # includes do not change scanners.
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
+
+ ISFILE $(angle) $(quoted) ;
+ }
+}
+
+scanner.register c-scanner : include ;
+
+type.register CPP : cpp cxx cc ;
+type.register H : h ;
+type.register HPP : hpp : H ;
+type.register C : c ;
+
+# In most cases where a CPP file or an H file is a source of some action, we
+# should rebuild the result if any of the files included by the CPP/H file
+# change. One case where this is not needed is installation, which is handled
+# specifically.
+type.set-scanner CPP : c-scanner ;
+type.set-scanner C : c-scanner ;
+# One case where scanning of H/HPP files is necessary is PCH generation -- if
+# any header included by the HPP being precompiled changes, we need to recompile the
+# header.
+type.set-scanner H : c-scanner ;
+type.set-scanner HPP : c-scanner ;
diff --git a/jam-files/boost-build/tools/types/exe.jam b/jam-files/boost-build/tools/types/exe.jam
new file mode 100644
index 000000000..47109513a
--- /dev/null
+++ b/jam-files/boost-build/tools/types/exe.jam
@@ -0,0 +1,9 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register EXE ;
+type.set-generated-target-suffix EXE : <target-os>windows : "exe" ;
+type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ;
diff --git a/jam-files/boost-build/tools/types/html.jam b/jam-files/boost-build/tools/types/html.jam
new file mode 100644
index 000000000..5cd337d09
--- /dev/null
+++ b/jam-files/boost-build/tools/types/html.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type HTML : html ;
diff --git a/jam-files/boost-build/tools/types/lib.jam b/jam-files/boost-build/tools/types/lib.jam
new file mode 100644
index 000000000..854ab8fd5
--- /dev/null
+++ b/jam-files/boost-build/tools/types/lib.jam
@@ -0,0 +1,74 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ; # for set-generated-target-suffix
+import os ;
+
+# The following naming scheme is used for libraries.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (msvc)
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (mingw):
+# libxxx.a static library
+# libxxx.dll DLL
+# libxxx.dll.a import library
+#
+# On cygwin i.e. <target-os>cygwin
+# libxxx.a static library
+# cygxxx.dll DLL
+# libxxx.dll.a import library
+#
+
+type.register LIB ;
+
+# FIXME: should not register both extensions on both platforms.
+type.register STATIC_LIB : a lib : LIB ;
+
+# The 'lib' prefix is used everywhere
+type.set-generated-target-prefix STATIC_LIB : : lib ;
+
+# Use '.lib' suffix for windows
+type.set-generated-target-suffix STATIC_LIB : <target-os>windows : lib ;
+
+# Except with gcc.
+type.set-generated-target-suffix STATIC_LIB : <toolset>gcc <target-os>windows : a ;
+
+# Use xxx.lib for import libs
+type IMPORT_LIB : : STATIC_LIB ;
+type.set-generated-target-prefix IMPORT_LIB : : "" ;
+type.set-generated-target-suffix IMPORT_LIB : : lib ;
+
+# Except with gcc (mingw or cygwin), where libxxx.dll.a is used.
+type.set-generated-target-prefix IMPORT_LIB : <toolset>gcc : lib ;
+type.set-generated-target-suffix IMPORT_LIB : <toolset>gcc : dll.a ;
+
+type.register SHARED_LIB : so dll dylib : LIB ;
+
+# Both mingw and cygwin use libxxx.dll naming scheme.
+# On Linux, use "lib" prefix
+type.set-generated-target-prefix SHARED_LIB : : lib ;
+# But don't use it on windows
+type.set-generated-target-prefix SHARED_LIB : <target-os>windows : "" ;
+# But use it again on mingw
+type.set-generated-target-prefix SHARED_LIB : <toolset>gcc <target-os>windows : lib ;
+# And use 'cyg' on cygwin
+type.set-generated-target-prefix SHARED_LIB : <target-os>cygwin : cyg ;
+
+
+type.set-generated-target-suffix SHARED_LIB : <target-os>windows : dll ;
+type.set-generated-target-suffix SHARED_LIB : <target-os>cygwin : dll ;
+type.set-generated-target-suffix SHARED_LIB : <target-os>darwin : dylib ;
+
+type SEARCHED_LIB : : LIB ;
+# This is needed so that when we create a target of SEARCHED_LIB
+# type, there's no prefix or suffix automatically added.
+type.set-generated-target-prefix SEARCHED_LIB : : "" ;
+type.set-generated-target-suffix SEARCHED_LIB : : "" ;
diff --git a/jam-files/boost-build/tools/types/obj.jam b/jam-files/boost-build/tools/types/obj.jam
new file mode 100644
index 000000000..6afbcaa6f
--- /dev/null
+++ b/jam-files/boost-build/tools/types/obj.jam
@@ -0,0 +1,9 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register OBJ : o obj ;
+type.set-generated-target-suffix OBJ : <target-os>windows : obj ;
+type.set-generated-target-suffix OBJ : <target-os>cygwin : obj ;
diff --git a/jam-files/boost-build/tools/types/objc.jam b/jam-files/boost-build/tools/types/objc.jam
new file mode 100644
index 000000000..709cbd0c7
--- /dev/null
+++ b/jam-files/boost-build/tools/types/objc.jam
@@ -0,0 +1,26 @@
+# Copyright Rene Rivera 2008, 2010.
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+import type ;
+import scanner ;
+import types/cpp ;
+
+class objc-scanner : c-scanner
+{
+ rule __init__ ( includes * )
+ {
+ c-scanner.__init__ $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ;
+ }
+}
+
+scanner.register objc-scanner : include ;
+
+type.register OBJECTIVE_C : m ;
+type.register OBJECTIVE_CPP : mm ;
+type.set-scanner OBJECTIVE_C : objc-scanner ;
+type.set-scanner OBJECTIVE_CPP : objc-scanner ;
diff --git a/jam-files/boost-build/tools/types/preprocessed.jam b/jam-files/boost-build/tools/types/preprocessed.jam
new file mode 100644
index 000000000..c9187ba67
--- /dev/null
+++ b/jam-files/boost-build/tools/types/preprocessed.jam
@@ -0,0 +1,9 @@
+# Copyright Steven Watanabe 2011
+# Distributed under the Boost Software License Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register PREPROCESSED_C : i : C ;
+type.register PREPROCESSED_CPP : ii : CPP ;
diff --git a/jam-files/boost-build/tools/types/qt.jam b/jam-files/boost-build/tools/types/qt.jam
new file mode 100644
index 000000000..6d1dfbd42
--- /dev/null
+++ b/jam-files/boost-build/tools/types/qt.jam
@@ -0,0 +1,10 @@
+# Copyright Vladimir Prus 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+type UI : ui ;
+type QRC : qrc ;
+type MOCCABLE_CPP ;
+type MOCCABLE_H ;
+# Result of running moc.
+type MOC : moc : H ;
diff --git a/jam-files/boost-build/tools/types/register.jam b/jam-files/boost-build/tools/types/register.jam
new file mode 100644
index 000000000..203992ca9
--- /dev/null
+++ b/jam-files/boost-build/tools/types/register.jam
@@ -0,0 +1,39 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This module's job is to automatically import all the type
+# registration modules in its directory.
+import type os path modules ;
+
+# Register the given type on the specified OSes, or on remaining OSes
+# if os is not specified. This rule is injected into each of the type
+# modules for the sake of convenience.
+local rule type ( type : suffixes * : base-type ? : os * )
+{
+ if ! [ type.registered $(type) ]
+ {
+ if ( ! $(os) ) || [ os.name ] in $(os)
+ {
+ type.register $(type) : $(suffixes) : $(base-type) ;
+ }
+ }
+}
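+# For illustration only, a sibling type module could then simply write, e.g.
+# (the type below is hypothetical):
+#
+#   type FOO : foo : : NT CYGWIN ;   # register FOO only on NT and CYGWIN
+#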
+
+.this-module's-file = [ modules.binding $(__name__) ] ;
+.this-module's-dir = [ path.parent $(.this-module's-file) ] ;
+.sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ;
+.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ;
+
+# A loop over all modules in this directory
+for m in $(.sibling-modules)
+{
+ m = [ path.basename $(m) ] ;
+ m = types/$(m) ;
+
+ # Inject the type rule into the new module
+ IMPORT $(__name__) : type : $(m) : type ;
+ import $(m) ;
+}
+
+
diff --git a/jam-files/boost-build/tools/types/rsp.jam b/jam-files/boost-build/tools/types/rsp.jam
new file mode 100644
index 000000000..bdf8a7c98
--- /dev/null
+++ b/jam-files/boost-build/tools/types/rsp.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type RSP : rsp ;
diff --git a/jam-files/boost-build/tools/unix.jam b/jam-files/boost-build/tools/unix.jam
new file mode 100644
index 000000000..75949851a
--- /dev/null
+++ b/jam-files/boost-build/tools/unix.jam
@@ -0,0 +1,224 @@
+# Copyright (c) 2004 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file implements linking semantics common to all Unix-like systems. On
+# Unix, static libraries must be specified in a fixed order on the linker
+# command line. The generators declared here store information about that
+# order and use it properly.
+
+import feature ;
+import "class" : new ;
+import generators ;
+import type ;
+import set ;
+import order ;
+import builtin ;
+
+class unix-linking-generator : linking-generator
+{
+ import property-set ;
+ import type ;
+ import unix ;
+
+ rule __init__ ( id
+ composing ? : # Specify whether the generator is composing. The generator
+ # will be composing if a non-empty string is passed or the
+ # parameter is not given. To make the generator non-composing,
+ # pass an empty string ("").
+ source-types + : target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
+ $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ linking-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ;
+ local libraries ;
+ for local l in $(sources)
+ {
+ if [ type.is-derived [ $(l).type ] LIB ]
+ {
+ libraries += $(l) ;
+ }
+ else
+ {
+ sources2 += $(l) ;
+ }
+ }
+
+ sources = $(sources2) [ unix.order-libraries $(libraries) ] ;
+
+ return [ linking-generator.generated-targets $(sources) : $(property-set)
+ : $(project) $(name) ] ;
+ }
+
+}
+
+class unix-archive-generator : archive-generator
+{
+ import unix ;
+
+ rule __init__ ( id composing ? : source-types + : target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
+ $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ archive-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+
+ }
+}
+
+class unix-searched-lib-generator : searched-lib-generator
+{
+ import unix ;
+ rule __init__ ( * : * )
+ {
+ generator.__init__
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule optional-properties ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local result = [ searched-lib-generator.run $(project) $(name)
+ : $(property-set) : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+ }
+}
+
+class unix-prebuilt-lib-generator : generator
+{
+ import unix ;
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ unix.set-library-order-aux $(f) : $(sources) ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB
+ : <file> <toolset>unix ] ;
+
+generators.override unix.prebuilt : builtin.lib-generator ;
+
+
+# Declare generators
+generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE
+ : <toolset>unix ] ;
+
+generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB
+ : <toolset>unix ] ;
+
+generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB
+ : <toolset>unix ] ;
+
+generators.register [ new unix-searched-lib-generator
+ unix.searched-lib-generator : : SEARCHED_LIB : <toolset>unix ] ;
+
+
+# Derived toolsets must specify their own actions.
+actions link {
+}
+
+actions link.dll {
+}
+
+actions archive {
+}
+
+actions searched-lib-generator {
+}
+
+actions prebuilt {
+}
+
+
+
+
+
+.order = [ new order ] ;
+
+rule set-library-order-aux ( from * : to * )
+{
+ for local f in $(from)
+ {
+ for local t in $(to)
+ {
+ if $(f) != $(t)
+ {
+ $(.order).add-pair $(f) $(t) ;
+ }
+ }
+ }
+}
+
+rule set-library-order ( sources * : property-set : result * )
+{
+ local used-libraries ;
+ local deps = [ $(property-set).dependency ] ;
+ for local l in $(sources) $(deps:G=)
+ {
+ if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
+ {
+ used-libraries += $(l) ;
+ }
+ }
+
+ local created-libraries ;
+ for local l in $(result)
+ {
+ if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
+ {
+ created-libraries += $(l) ;
+ }
+ }
+
+ created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ;
+ set-library-order-aux $(created-libraries) : $(used-libraries) ;
+}
+
+rule order-libraries ( libraries * )
+{
+ local r = [ $(.order).order $(libraries) ] ;
+ return $(r) ;
+}
+
\ No newline at end of file
diff --git a/jam-files/boost-build/tools/vacpp.jam b/jam-files/boost-build/tools/vacpp.jam
new file mode 100644
index 000000000..f4080fc04
--- /dev/null
+++ b/jam-files/boost-build/tools/vacpp.jam
@@ -0,0 +1,150 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Boost.Build V2 toolset for the IBM XL C++ compiler
+#
+
+import toolset : flags ;
+import feature ;
+import common ;
+import generators ;
+import os ;
+
+feature.extend toolset : vacpp ;
+toolset.inherit vacpp : unix ;
+generators.override vacpp.prebuilt : builtin.prebuilt ;
+generators.override vacpp.searched-lib-generator : searched-lib-generator ;
+
+# Configure the vacpp toolset
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters vacpp : version $(version) ] ;
+
+ command = [ common.get-invocation-command vacpp : xlC
+ : $(command) : "/usr/vacpp/bin/xlC" ] ;
+
+ common.handle-options vacpp : $(condition) : $(command) : $(options) ;
+}
+
+# Declare generators
+generators.register-c-compiler vacpp.compile.c : C : OBJ : <toolset>vacpp ;
+generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : <toolset>vacpp ;
+
+# Allow C++ style comments in C files
+flags vacpp CFLAGS : -qcpluscmt ;
+
+# Declare flags
+flags vacpp CFLAGS <optimization>off : -qNOOPTimize ;
+flags vacpp CFLAGS <optimization>speed : -O3 -qstrict ;
+flags vacpp CFLAGS <optimization>space : -O2 -qcompact ;
+
+# Discretionary inlining (not recommended)
+flags vacpp CFLAGS <inlining>off : -qnoinline ;
+flags vacpp CFLAGS <inlining>on : -qinline ;
+#flags vacpp CFLAGS <inlining>full : -qinline ;
+flags vacpp CFLAGS <inlining>full : ;
+
+# Exception handling
+flags vacpp C++FLAGS <exception-handling>off : -qnoeh ;
+flags vacpp C++FLAGS <exception-handling>on : -qeh ;
+
+# Run-time Type Identification
+flags vacpp C++FLAGS <rtti>off : -qnortti ;
+flags vacpp C++FLAGS <rtti>on : -qrtti ;
+
+# Enable 64-bit memory addressing model
+flags vacpp CFLAGS <address-model>64 : -q64 ;
+flags vacpp LINKFLAGS <address-model>64 : -q64 ;
+flags vacpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ;
+
+# Use absolute path when generating debug information
+flags vacpp CFLAGS <debug-symbols>on : -g -qfullpath ;
+flags vacpp LINKFLAGS <debug-symbols>on : -g -qfullpath ;
+flags vacpp LINKFLAGS <debug-symbols>off : -s ;
+
+if [ os.name ] = AIX
+{
+ flags vacpp.compile C++FLAGS : -qfuncsect ;
+
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent; this is as close as it gets.
+ #
+ # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+ flags vacpp.link LINKFLAGS <link>shared : -bnoipath ;
+
+ # Run-time linking
+ flags vacpp.link EXE-LINKFLAGS <link>shared : -brtl ;
+}
+else
+{
+ # Linux PPC
+ flags vacpp.compile CFLAGS <link>shared : -qpic=large ;
+ flags vacpp FINDLIBS : rt ;
+}
+
+# Profiling
+flags vacpp CFLAGS <profiling>on : -pg ;
+flags vacpp LINKFLAGS <profiling>on : -pg ;
+
+flags vacpp.compile OPTIONS <cflags> ;
+flags vacpp.compile.c++ OPTIONS <cxxflags> ;
+flags vacpp DEFINES <define> ;
+flags vacpp UNDEFS <undef> ;
+flags vacpp HDRS <include> ;
+flags vacpp STDHDRS <sysinclude> ;
+flags vacpp.link OPTIONS <linkflags> ;
+flags vacpp ARFLAGS <arflags> ;
+
+flags vacpp LIBPATH <library-path> ;
+flags vacpp NEEDLIBS <library-file> ;
+flags vacpp FINDLIBS <find-shared-library> ;
+flags vacpp FINDLIBS <find-static-library> ;
+
+# Select the compiler name according to the threading model.
+flags vacpp VA_C_COMPILER <threading>single : xlc ;
+flags vacpp VA_C_COMPILER <threading>multi : xlc_r ;
+flags vacpp VA_CXX_COMPILER <threading>single : xlC ;
+flags vacpp VA_CXX_COMPILER <threading>multi : xlC_r ;
+
+SPACE = " " ;
+
+flags vacpp.link.dll HAVE_SONAME <target-os>linux : "" ;
+
+actions vacpp.link bind NEEDLIBS
+{
+ $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions vacpp.link.dll bind NEEDLIBS
+{
+ xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions vacpp.compile.c
+{
+ $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions vacpp.compile.c++
+{
+ $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions updated together piecemeal vacpp.archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/whale.jam b/jam-files/boost-build/tools/whale.jam
new file mode 100644
index 000000000..9335ff0c0
--- /dev/null
+++ b/jam-files/boost-build/tools/whale.jam
@@ -0,0 +1,116 @@
+# Copyright (C) Vladimir Prus 2002-2005.
+
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements support for Whale/Dolphin/WD parser/lexer tools.
+# See http://www.cs.queensu.ca/home/okhotin/whale/ for details.
+#
+# There are three interesting target types:
+# - WHL (the parser sources), that are converted to CPP and H
+# - DLP (the lexer sources), that are converted to CPP and H
+# - WD (combined parser/lexer sources), that are converted to WHL + DLP
+
+import type ;
+import generators ;
+import path ;
+import "class" : new ;
+import errors ;
+
+rule init ( path # path to the Whale/Dolphin/WD binaries
+ )
+{
+ if $(.configured) && $(.path) != $(path)
+ {
+ errors.user-error "Attempt to reconfigure Whale support" :
+ "Previously configured with path \"$(.path:E=<empty>)\"" :
+ "Now configuring with path \"$(path:E=<empty>)\"" ;
+
+ }
+ .configured = true ;
+ .path = $(path) ;
+
+ .whale = [ path.join $(path) whale ] ;
+ .dolphin = [ path.join $(path) dolphin ] ;
+ .wd = [ path.join $(path) wd ] ;
+}
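+# For illustration only -- a hypothetical configuration line (the path is made
+# up) that would end up calling the rule above:
+#
+#   using whale : /usr/local/whale/bin ;
+#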
+
+
+# Declare the types.
+type.register WHL : whl ;
+type.register DLP : dlp ;
+type.register WHL_LR0 : lr0 ;
+type.register WD : wd ;
+
+# Declare standard generators.
+generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ;
+generators.register-standard whale.dolphin : DLP : CPP H ;
+generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ;
+
+# The conversions defined above are ambiguous when we generate CPP from WD:
+# we can go either via the WHL type or via the DLP type.
+# The following custom generator handles this by running both conversions.
+
+class wd-to-cpp : generator
+{
+ rule __init__ ( * : * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) ;
+ }
+
+ rule run ( project name ? : property-set : source * )
+ {
+ if ! $(source[2])
+ {
+ local new-sources ;
+ if ! [ $(source).type ] in WHL DLP
+ {
+ local r1 = [ generators.construct $(project) $(name)
+ : WHL : $(property-set) : $(source) ] ;
+ local r2 = [ generators.construct $(project) $(name)
+ : DLP : $(property-set) : $(source) ] ;
+
+ new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
+ }
+ else
+ {
+ new-sources = $(source) ;
+ }
+
+ local result ;
+ for local i in $(new-sources)
+ {
+ local t = [ generators.construct $(project) $(name) : CPP
+ : $(property-set) : $(i) ] ;
+ result += $(t[2-]) ;
+ }
+ return $(result) ;
+ }
+ }
+
+}
+
+
+generators.override whale.wd-to-cpp : whale.whale ;
+generators.override whale.wd-to-cpp : whale.dolphin ;
+
+
+generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ;
+
+
+actions whale
+{
+ $(.whale) -d $(<[1]:D) $(>)
+}
+
+actions dolphin
+{
+ $(.dolphin) -d $(<[1]:D) $(>)
+}
+
+actions wd
+{
+ $(.wd) -d $(<[1]:D) -g $(>)
+}
+
diff --git a/jam-files/boost-build/tools/xlf.jam b/jam-files/boost-build/tools/xlf.jam
new file mode 100644
index 000000000..e7fcc6086
--- /dev/null
+++ b/jam-files/boost-build/tools/xlf.jam
@@ -0,0 +1,39 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# toolset configuration for the IBM Fortran compiler (xlf)
+#
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags xlf OPTIONS <optimization>off : -O0 ;
+flags xlf OPTIONS <optimization>speed : -O3 ;
+flags xlf OPTIONS <optimization>space : -Os ;
+
+flags xlf OPTIONS <debug-symbols>on : -g ;
+flags xlf OPTIONS <profiling>on : -pg ;
+
+flags xlf DEFINES <define> ;
+flags xlf INCLUDES <include> ;
+
+rule compile-fortran
+{
+}
+
+actions compile-fortran
+{
+ xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/xsltproc-config.jam b/jam-files/boost-build/tools/xsltproc-config.jam
new file mode 100644
index 000000000..de54a2eb3
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc-config.jam
@@ -0,0 +1,37 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the xsltproc tool. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ local xsltproc-path = [ GLOB [ modules.peek : PATH ] "C:\\Boost\\bin" : xsltproc\.exe ] ;
+ xsltproc-path = $(xsltproc-path[1]) ;
+
+ if $(xsltproc-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
+ }
+ using xsltproc : $(xsltproc-path) ;
+ }
+}
+else
+{
+ local xsltproc-path = [ GLOB [ modules.peek : PATH ] : xsltproc ] ;
+ xsltproc-path = $(xsltproc-path[1]) ;
+
+ if $(xsltproc-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
+ }
+ using xsltproc : $(xsltproc-path) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/xsltproc.jam b/jam-files/boost-build/tools/xsltproc.jam
new file mode 100644
index 000000000..96f5170be
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc.jam
@@ -0,0 +1,194 @@
+# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# This module defines rules to apply an XSLT stylesheet to an XML file using the
+# xsltproc driver, part of libxslt.
+#
+# Note: except for 'init', this module does not provide any rules for end
+# users.
+
+import feature ;
+import regex ;
+import sequence ;
+import common ;
+import os ;
+import modules ;
+import path ;
+import errors ;
+
+feature.feature xsl:param : : free ;
+feature.feature xsl:path : : free ;
+feature.feature catalog : : free ;
+
+
+# Initialize xsltproc support. The parameters are:
+# xsltproc: The xsltproc executable
+#
+rule init ( xsltproc ? )
+{
+ if $(xsltproc)
+ {
+ modify-config ;
+ .xsltproc = $(xsltproc) ;
+ check-xsltproc ;
+ }
+}
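+# For illustration only -- a typical configuration line (the path below is
+# hypothetical):
+#
+#   using xsltproc : /usr/bin/xsltproc ;
+#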
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+ .xsltproc ?= [ modules.peek : XSLTPROC ] ;
+ .xsltproc ?= xsltproc ;
+ check-xsltproc ;
+ .is-cygwin = [ .is-cygwin $(.xsltproc) ] ;
+ }
+}
+
+rule modify-config
+{
+ if $(.config-frozen)
+ {
+ errors.user-error "xsltproc: Cannot change xsltproc command after it has been used." ;
+ }
+}
+
+rule check-xsltproc ( )
+{
+ if $(.xsltproc)
+ {
+ local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] ;
+ if $(status[2]) != "0"
+ {
+ errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ;
+ }
+ }
+}
+
+# Returns a non-empty string if a cygwin xsltproc binary was specified.
+rule is-cygwin ( )
+{
+ freeze-config ;
+ return $(.is-cygwin) ;
+}
+
+rule .is-cygwin ( xsltproc )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native
+ [ path.join [ path.parent $(file) ] xsltproc ] ] ;
+ if [ os.name ] = CYGWIN
+ {
+ dir = $(dir:W) ;
+ }
+ local command =
+ "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ;
+ local status = [ SHELL $(command) : no-output : exit-status ] ;
+ if $(status[2]) != "0"
+ {
+ return true ;
+ }
+ }
+}
+
+rule compute-xslt-flags ( target : properties * )
+{
+ local flags ;
+
+ # Raw flags.
+ flags += [ feature.get-values <flags> : $(properties) ] ;
+
+ # Translate <xsl:param> into command line flags.
+ for local param in [ feature.get-values <xsl:param> : $(properties) ]
+ {
+ local namevalue = [ regex.split $(param) "=" ] ;
+ flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
+ }
+
+ # Translate <xsl:path>.
+ for local path in [ feature.get-values <xsl:path> : $(properties) ]
+ {
+ flags += --path \"$(path:G=)\" ;
+ }
+
+ # Take care of implicit dependencies.
+ local other-deps ;
+ for local dep in [ feature.get-values <implicit-dependency> : $(properties) ]
+ {
+ other-deps += [ $(dep:G=).creating-subvariant ] ;
+ }
+
+ local implicit-target-directories ;
+ for local dep in [ sequence.unique $(other-deps) ]
+ {
+ implicit-target-directories += [ $(dep).all-target-directories ] ;
+ }
+
+ for local dir in $(implicit-target-directories)
+ {
+ flags += --path \"$(dir:T)\" ;
+ }
+
+ return $(flags) ;
+}
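+# For example (illustrative values), a requirement such as
+# <xsl:param>boost.root=../.. is turned by the rule above into
+# --stringparam boost.root "../..", and <xsl:path>some/dir into
+# --path "some/dir".
+#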
+
+
+local rule .xsltproc ( target : source stylesheet : properties * : dirname ? : action )
+{
+ freeze-config ;
+ STYLESHEET on $(target) = $(stylesheet) ;
+ FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ;
+ NAME on $(target) = $(.xsltproc) ;
+
+ for local catalog in [ feature.get-values <catalog> : $(properties) ]
+ {
+ CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : $(catalog:T) ] ;
+ }
+
+ if [ os.on-windows ] && ! [ is-cygwin ]
+ {
+ action = $(action).windows ;
+ }
+
+ $(action) $(target) : $(source) ;
+}
+
+
+rule xslt ( target : source stylesheet : properties * )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : xslt-xsltproc ] ;
+}
+
+
+rule xslt-dir ( target : source stylesheet : properties * : dirname )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : $(dirname) : xslt-xsltproc-dir ] ;
+}
+
+actions xslt-xsltproc.windows
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
+}
+
+
+actions xslt-xsltproc-dir.windows bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc-dir bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
+}
diff --git a/jam-files/boost-build/tools/xsltproc/included.xsl b/jam-files/boost-build/tools/xsltproc/included.xsl
new file mode 100644
index 000000000..ef86394a9
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc/included.xsl
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2010 Steven Watanabe
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+</xsl:stylesheet>
diff --git a/jam-files/boost-build/tools/xsltproc/test.xml b/jam-files/boost-build/tools/xsltproc/test.xml
new file mode 100644
index 000000000..57c8ba187
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc/test.xml
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root/>
diff --git a/jam-files/boost-build/tools/xsltproc/test.xsl b/jam-files/boost-build/tools/xsltproc/test.xsl
new file mode 100644
index 000000000..a142c91dd
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc/test.xsl
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2010 Steven Watanabe
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+ <xsl:include href="included.xsl"/>
+</xsl:stylesheet>
diff --git a/jam-files/boost-build/tools/zlib.jam b/jam-files/boost-build/tools/zlib.jam
new file mode 100644
index 000000000..f9138fd57
--- /dev/null
+++ b/jam-files/boost-build/tools/zlib.jam
@@ -0,0 +1,92 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the zlib library
+#
+# After 'using zlib', the following targets are available:
+#
+# /zlib//zlib -- The zlib library
+
+
+# In addition to its direct purpose of supporting zlib, this module also
+# serves as a canonical example of how third-party configuration works
+# in Boost.Build. The operation is as follows:
+#
+# - For each 'using zlib : condition ... : ...' we create a target alternative
+# for zlib, with the specified condition.
+# - There's one target alternative for 'zlib' with no specific condition
+# properties.
+#
+# Two invocations of 'using zlib' with the same condition but different
+# properties are not permitted, e.g.:
+#
+# using zlib : condition <target-os>windows : include foo ;
+# using zlib : condition <target-os>windows : include bar ;
+#
+# is an error. The one exception is the empty condition: a 'using' without any
+# parameters is overridable. That is:
+#
+# using zlib ;
+# using zlib : include foo ;
+#
+# is OK; in that case the first 'using' is ignored. Likewise if the order of
+# the statements is reversed.
+#
+# When 'zlib' target is built, a target alternative is selected as usual for
+# Boost.Build. The selected alternative is a custom target class, which:
+#
+# - calls ac.find-include-path to find the header path. If an explicit path is
+# provided in 'using', only that path is checked, and if no header is found
+# there, an error is emitted. Otherwise, we check the directory specified by
+# the ZLIB_INCLUDE environment variable, and failing that, the standard
+# directories.
+# [TODO: document sysroot handling]
+# - calls ac.find-library to find the library, in an identical fashion.
+#
+
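+# For illustration only -- hedged examples of configuring this module from
+# user-config.jam (the version and paths below are hypothetical):
+#
+#   using zlib ;
+#   using zlib : 1.2.8 : <include>/opt/zlib/include <search>/opt/zlib/lib ;
+#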
+import project ;
+import ac ;
+import errors ;
+import "class" : new ;
+import targets ;
+
+project.initialize $(__name__) ;
+project = [ project.current ] ;
+project zlib ;
+
+header = zlib.h ;
+names = z zlib zll zdll ;
+
+.default-alternative = [ new ac-library zlib : $(project) ] ;
+$(.default-alternative).set-header $(header) ;
+$(.default-alternative).set-default-names $(names) ;
+targets.main-target-alternative $(.default-alternative) ;
+
+rule init ( * : * )
+{
+ if ! $(condition)
+ {
+ # Special case the no-condition case so that 'using' without parameters
+ # can mix with more specific 'using'.
+ $(.default-alternative).reconfigure $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ # FIXME: consider if we should allow overriding definitions for a given
+ # condition -- e.g. project-config.jam might want to override whatever is
+ # in user-config.jam.
+ local mt = [ new ac-library zlib : $(project)
+ : $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+}
+
+
+
+
+
+
diff --git a/jam-files/boost-build/user-config.jam b/jam-files/boost-build/user-config.jam
new file mode 100644
index 000000000..81091a684
--- /dev/null
+++ b/jam-files/boost-build/user-config.jam
@@ -0,0 +1,92 @@
+# Copyright 2003, 2005 Douglas Gregor
+# Copyright 2004 John Maddock
+# Copyright 2002, 2003, 2004, 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is used to configure your Boost.Build installation. You can modify
+# this file in place, or you can place it in a permanent location so that it
+# does not get overwritten should you get a new version of Boost.Build. See:
+#
+# http://www.boost.org/boost-build2/doc/html/bbv2/overview/configuration.html
+#
+# for documentation about possible permanent locations.
+
+# This file specifies which toolsets (C++ compilers), libraries, and other
+# tools are available. Often, you should be able to just uncomment existing
+# example lines and adjust them to taste. The complete list of supported tools,
+# and configuration instructions can be found at:
+#
+# http://boost.org/boost-build2/doc/html/bbv2/reference/tools.html
+#
+
+# This file uses Jam language syntax to describe available tools. Mostly,
+# there are 'using' lines that contain the name of the used tool and the
+# parameters to pass to that tool -- where parameters are separated by
+# colons. Important syntax notes:
+#
+# - Both ':' and ';' must be separated from other tokens by whitespace
+# - The '\' symbol is a quote character, so when specifying Windows paths you
+# should use '/' or '\\' instead.
+#
+# More details about the syntax can be found at:
+#
+# http://boost.org/boost-build2/doc/html/bbv2/advanced.html#bbv2.advanced.jam_language
+#
+
+# ------------------
+# GCC configuration.
+# ------------------
+
+# Configure gcc (default version).
+#using gcc ;
+
+# Configure specific gcc version, giving alternative name to use.
+# using gcc : 3.2 : g++-3.2 ;
+
+
+# -------------------
+# MSVC configuration.
+# -------------------
+
+# Configure msvc (default version, searched for in standard locations and PATH).
+# using msvc ;
+
+# Configure specific msvc version (searched for in standard locations and PATH).
+# using msvc : 8.0 ;
+
+
+# ----------------------
+# Borland configuration.
+# ----------------------
+# using borland ;
+
+
+# ----------------------
+# STLPort configuration.
+# ----------------------
+
+# Configure specifying location of STLPort headers. Libraries must be either
+# not needed or available to the compiler by default.
+# using stlport : : /usr/include/stlport ;
+
+# Configure specifying location of both headers and libraries explicitly.
+# using stlport : : /usr/include/stlport /usr/lib ;
+
+
+# -----------------
+# QT configuration.
+# -----------------
+
+# Configure assuming QTDIR gives the installation prefix.
+# using qt ;
+
+# Configure with an explicit installation prefix.
+# using qt : /usr/opt/qt ;
+
+# ---------------------
+# Python configuration.
+# ---------------------
+
+# Configure specific Python version.
+# using python : 3.1 : /usr/bin/python3 : /usr/include/python3.1 : /usr/lib ;
diff --git a/jam-files/boost-build/util/assert.jam b/jam-files/boost-build/util/assert.jam
new file mode 100644
index 000000000..abedad525
--- /dev/null
+++ b/jam-files/boost-build/util/assert.jam
@@ -0,0 +1,336 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import modules ;
+
+
+################################################################################
+#
+# Private implementation details.
+#
+################################################################################
+
+# Rule added as a replacement for the regular Jam = operator but which does not
+# ignore trailing empty string elements.
+#
+local rule exact-equal-test ( lhs * : rhs * )
+{
+ local lhs_extended = $(lhs) xxx ;
+ local rhs_extended = $(rhs) xxx ;
+ if $(lhs_extended) = $(rhs_extended)
+ {
+ return true ;
+ }
+}
+
+
+# Two lists are considered set-equal if they contain the same elements, ignoring
+# duplicates and ordering.
+#
+local rule set-equal-test ( set1 * : set2 * )
+{
+ if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
+
+
+################################################################################
+#
+# Public interface.
+#
+################################################################################
+
+# Assert the equality of A and B, ignoring trailing empty string elements.
+#
+rule equal ( a * : b * )
+{
+ if $(a) != $(b)
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a false
+# logical value (is either an empty list or all empty strings).
+#
+rule false ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) ] ;
+ }
+
+ if $(result)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected false result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) ] "]" : Got: "[" \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert that ELEMENT is present in LIST.
+#
+rule "in" ( element : list * )
+{
+ if ! $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected \"$(element)\" in
+ "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B, ignoring trailing empty string elements.
+#
+rule not-equal ( a * : b * )
+{
+ if $(a) = $(b)
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that ELEMENT is not present in LIST.
+#
+rule not-in ( element : list * )
+{
+ if $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion failure: Did not expect
+ \"$(element)\" in "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B as sets.
+#
+rule not-set-equal ( a * : b * )
+{
+ if [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to not be equal as sets ;
+ }
+}
+
+
+# Assert that A and B are not exactly equal, not ignoring trailing empty string
+# elements.
+#
+rule not-exact-equal ( a * : b * )
+{
+ if [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" ;
+ }
+}
+
+
+# Assert that EXPECTED is the result of calling RULE-NAME with the given
+# arguments.
+#
+rule result ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) ] ;
+ }
+
+ if ! [ exact-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
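+# For illustration (the rule name is hypothetical), a typical use looks like:
+#
+#   assert.result 1 2 3 : my-sequence-rule some-arg ;
+#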
+
+
+# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored)
+# to the result of calling RULE-NAME with the given arguments. Note that rules
+# called this way may accept at most 8 parameters.
+#
+rule result-set-equal ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) ] ;
+ }
+
+ if ! [ set-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert the equality of A and B as sets.
+#
+rule set-equal ( a * : b * )
+{
+ if ! [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to be equal as sets ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a true
+# logical value (is neither an empty list nor all empty strings).
+#
+rule true ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) ] ;
+ }
+
+ if ! $(result)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected true result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) ] "]" ;
+ }
+}
+
+
+# Assert the exact equality of A and B, not ignoring trailing empty string
+# elements.
+#
+rule exact-equal ( a * : b * )
+{
+ if ! [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" ;
+ }
+}
+
+
+# Assert that the given variable is not an empty list.
+#
+rule variable-not-empty ( name )
+{
+ local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ;
+ if ! $(value)-is-not-empty
+ {
+ errors.error-skip-frames 3 assertion failure: Expected variable
+ \"$(name)\" not to be an empty list ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Helper rule used to avoid test duplication related to different list
+ # equality test rules.
+ #
+ local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? )
+ {
+ local not-equality-assert = not-$(equality-assert) ;
+
+ # When the given equality test is expected to ignore trailing empty
+ # strings some of the test results should be inverted.
+ local not-equality-assert-i = not-$(equality-assert) ;
+ if $(ignore-trailing-empty-strings)
+ {
+ not-equality-assert-i = $(equality-assert) ;
+ }
+
+ $(equality-assert) : ;
+ $(equality-assert) "" "" : "" "" ;
+ $(not-equality-assert-i) : "" "" ;
+ $(equality-assert) x : x ;
+ $(not-equality-assert) : x ;
+ $(not-equality-assert) "" : x ;
+ $(not-equality-assert) "" "" : x ;
+ $(not-equality-assert-i) x : x "" ;
+ $(equality-assert) x "" : x "" ;
+ $(not-equality-assert) x : "" x ;
+ $(equality-assert) "" x : "" x ;
+
+ $(equality-assert) 1 2 3 : 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 3 2 1 ;
+ $(not-equality-assert) 1 2 3 : 1 5 3 ;
+ $(not-equality-assert) 1 2 3 : 1 "" 3 ;
+ $(not-equality-assert) 1 2 3 : 1 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 1 2 2 3 ;
+ $(not-equality-assert) 1 2 3 : 5 6 7 ;
+
+ # Extra variables used here just to make sure Boost Jam or Boost Build
+ # do not handle lists with empty strings differently depending on
+ # whether they are literals or stored in variables.
+
+ local empty = ;
+ local empty-strings = "" "" ;
+ local x-empty-strings = x "" "" ;
+ local empty-strings-x = "" "" x ;
+
+ $(equality-assert) : $(empty) ;
+ $(not-equality-assert-i) "" : $(empty) ;
+ $(not-equality-assert-i) "" "" : $(empty) ;
+ $(not-equality-assert-i) : $(empty-strings) ;
+ $(not-equality-assert-i) "" : $(empty-strings) ;
+ $(equality-assert) "" "" : $(empty-strings) ;
+ $(equality-assert) $(empty) : $(empty) ;
+ $(equality-assert) $(empty-strings) : $(empty-strings) ;
+ $(not-equality-assert-i) $(empty) : $(empty-strings) ;
+ $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ;
+ $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ;
+ $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert-i) x "" : $(x-empty-strings) ;
+ $(equality-assert) x "" "" : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert) "" x : $(empty-strings-x) ;
+ $(equality-assert) "" "" x : $(empty-strings-x) ;
+ }
+
+
+ # ---------------
+ # Equality tests.
+ # ---------------
+
+ run-equality-test equal : ignore-trailing-empty-strings ;
+ run-equality-test exact-equal ;
+
+
+ # -------------------------
+ # assert.set-equal() tests.
+ # -------------------------
+
+ set-equal : ;
+ not-set-equal "" "" : ;
+ set-equal "" "" : "" ;
+ set-equal "" "" : "" "" ;
+ set-equal a b c : a b c ;
+ set-equal a b c : b c a ;
+ set-equal a b c a : a b c ;
+ set-equal a b c : a b c a ;
+ not-set-equal a b c : a b c d ;
+ not-set-equal a b c d : a b c ;
+}
diff --git a/jam-files/boost-build/util/container.jam b/jam-files/boost-build/util/container.jam
new file mode 100644
index 000000000..dd4963938
--- /dev/null
+++ b/jam-files/boost-build/util/container.jam
@@ -0,0 +1,339 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Various container classes.
+
+# Base for container objects. This lets us construct recursive structures. That
+# is containers with containers in them, specifically so we can tell literal
+# values from node values.
+#
+class node
+{
+ rule __init__ (
+ value ? # Optional value to set node to initially.
+ )
+ {
+ self.value = $(value) ;
+ }
+
+ # Set the value of this node, passing nothing will clear it.
+ #
+ rule set ( value * )
+ {
+ self.value = $(value) ;
+ }
+
+ # Get the value of this node.
+ #
+ rule get ( )
+ {
+ return $(self.value) ;
+ }
+}
+
+
+# A simple vector. Interface mimics the C++ std::vector and std::list, with the
+# exception that indices are one (1) based to follow Jam standard.
+#
+# TODO: Possibly add assertion checks.
+#
+class vector : node
+{
+ import numbers ;
+ import utility ;
+ import sequence ;
+
+ rule __init__ (
+ values * # Initial contents of vector.
+ )
+ {
+ node.__init__ ;
+ self.value = $(values) ;
+ }
+
+ # Get the value of the first element.
+ #
+ rule front ( )
+ {
+ return $(self.value[1]) ;
+ }
+
+ # Get the value of the last element.
+ #
+ rule back ( )
+ {
+ return $(self.value[-1]) ;
+ }
+
+ # Get the value of the element at the given index, one based. Access to
+ # elements of recursive structures is supported directly. Specifying
+ # additional index values recursively accesses the elements as containers.
+ # For example: [ $(v).at 1 : 2 ] would retrieve the second element of our
+ # first element, assuming the first element is a container.
+ #
+ rule at (
+ index # The element index, one based.
+ : * # Additional indices to access recursively.
+ )
+ {
+ local r = $(self.value[$(index)]) ;
+ if $(2)
+ {
+ r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ return $(r) ;
+ }
+
+ # Get the value contained in the given element. This has the same
+ # functionality and interface as "at" but in addition gets the value of the
+ # referenced element, assuming it is a "node".
+ #
+ rule get-at (
+ index # The element index, one based.
+ : * # Additional indices to access recursively.
+ )
+ {
+ local r = $(self.value[$(index)]) ;
+ if $(2)
+ {
+ r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ return [ $(r).get ] ;
+ }
+
+ # Insert the given value into the front of the vector pushing the rest of
+ # the elements back.
+ #
+ rule push-front (
+ value # Value to become first element.
+ )
+ {
+ self.value = $(value) $(self.value) ;
+ }
+
+ # Remove the front element from the vector. Does not return the value. No
+ # effect if vector is empty.
+ #
+ rule pop-front ( )
+ {
+ self.value = $(self.value[2-]) ;
+ }
+
+ # Add the given value at the end of the vector.
+ #
+ rule push-back (
+ value # Value to become back element.
+ )
+ {
+ self.value += $(value) ;
+ }
+
+ # Remove the back element from the vector. Does not return the value. No
+ # effect if vector is empty.
+ #
+ rule pop-back ( )
+ {
+ self.value = $(self.value[1--2]) ;
+ }
+
+ # Insert the given value at the given index, one based. The values at and to
+ # the right of the index are pushed back to make room for the new value.
+ # If the index is past the end of the vector the element is added to the
+ # end.
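+ # For example, inserting "x" at index 2 into [ a b c ] yields [ a x b c ],
+ # while inserting "x" at index 99 yields [ a b c x ].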
+ #
+ rule insert (
+ index # The index to insert at, one based.
+ : value # The value to insert.
+ )
+ {
+ local left = $(self.value[1-$(index)]) ;
+ local right = $(self.value[$(index)-]) ;
+ if $(right)-is-not-empty
+ {
+ left = $(left[1--2]) ;
+ }
+ self.value = $(left) $(value) $(right) ;
+ }
+
+ # Remove one or more elements from the vector. The range is inclusive, and
+ # not specifying an end is equivalent to the [start, start] range.
+ #
+ rule erase (
+ start # Index of first element to remove.
+ end ? # Optional, index of last element to remove.
+ )
+ {
+ end ?= $(start) ;
+ local left = $(self.value[1-$(start)]) ;
+ left = $(left[1--2]) ;
+ local right = $(self.value[$(end)-]) ;
+ right = $(right[2-]) ;
+ self.value = $(left) $(right) ;
+ }
+
+ # Remove all elements from the vector.
+ #
+ rule clear ( )
+ {
+ self.value = ;
+ }
+
+ # The number of elements in the vector.
+ #
+ rule size ( )
+ {
+ return [ sequence.length $(self.value) ] ;
+ }
+
+ # Returns "true" if there are NO elements in the vector, empty otherwise.
+ #
+ rule empty ( )
+ {
+ if ! $(self.value)-is-not-empty
+ {
+ return true ;
+ }
+ }
+
+ # Returns the textual representation of content.
+ #
+ rule str ( )
+ {
+ return "[" [ sequence.transform utility.str : $(self.value) ] "]" ;
+ }
+
+ # Sorts the vector in place, calling 'utility.less' for comparisons.
+ #
+ rule sort ( )
+ {
+ self.value = [ sequence.insertion-sort $(self.value) : utility.less ] ;
+ }
+
+ # Returns true if the content is equal to the content of the other vector. Uses
+ # 'utility.equal' for comparison.
+ #
+ rule equal ( another )
+ {
+ local mismatch ;
+ local size = [ size ] ;
+ if $(size) = [ $(another).size ]
+ {
+ for local i in [ numbers.range 1 $(size) ]
+ {
+ if ! [ utility.equal [ at $(i) ] [ $(another).at $(i) ] ]
+ {
+ mismatch = true ;
+ }
+ }
+ }
+ else
+ {
+ mismatch = true ;
+ }
+
+ if ! $(mismatch)
+ {
+ return true ;
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+
+ local v1 = [ new vector ] ;
+ assert.true $(v1).equal $(v1) ;
+ assert.true $(v1).empty ;
+ assert.result 0 : $(v1).size ;
+ assert.result "[" "]" : $(v1).str ;
+ $(v1).push-back b ;
+ $(v1).push-front a ;
+ assert.result "[" a b "]" : $(v1).str ;
+ assert.result a : $(v1).front ;
+ assert.result b : $(v1).back ;
+ $(v1).insert 2 : d ;
+ $(v1).insert 2 : c ;
+ $(v1).insert 4 : f ;
+ $(v1).insert 4 : e ;
+ $(v1).pop-back ;
+ assert.result 5 : $(v1).size ;
+ assert.result d : $(v1).at 3 ;
+ $(v1).pop-front ;
+ assert.result c : $(v1).front ;
+ assert.false $(v1).empty ;
+ $(v1).erase 3 4 ;
+ assert.result 2 : $(v1).size ;
+
+ local v2 = [ new vector q w e r t y ] ;
+ assert.result 6 : $(v2).size ;
+ $(v1).push-back $(v2) ;
+ assert.result 3 : $(v1).size ;
+ local v2-alias = [ $(v1).back ] ;
+ assert.result e : $(v2-alias).at 3 ;
+ $(v1).clear ;
+ assert.true $(v1).empty ;
+ assert.false $(v2-alias).empty ;
+ $(v2).pop-back ;
+ assert.result t : $(v2-alias).back ;
+
+ local v3 = [ new vector ] ;
+ $(v3).push-back [ new vector 1 2 3 4 5 ] ;
+ $(v3).push-back [ new vector a b c ] ;
+ assert.result "[" "[" 1 2 3 4 5 "]" "[" a b c "]" "]" : $(v3).str ;
+ $(v3).push-back [ new vector [ new vector x y z ] [ new vector 7 8 9 ] ] ;
+ assert.result 1 : $(v3).at 1 : 1 ;
+ assert.result b : $(v3).at 2 : 2 ;
+ assert.result a b c : $(v3).get-at 2 ;
+ assert.result 7 8 9 : $(v3).get-at 3 : 2 ;
+
+ local v4 = [ new vector 4 3 6 ] ;
+ $(v4).sort ;
+ assert.result 3 4 6 : $(v4).get ;
+ assert.false $(v4).equal $(v3) ;
+
+ local v5 = [ new vector 3 4 6 ] ;
+ assert.true $(v4).equal $(v5) ;
+ # Check that vectors of different sizes are considered non-equal.
+ $(v5).pop-back ;
+ assert.false $(v4).equal $(v5) ;
+
+ local v6 = [ new vector [ new vector 1 2 3 ] ] ;
+ assert.true $(v6).equal [ new vector [ new vector 1 2 3 ] ] ;
+
+ local v7 = [ new vector 111 222 333 ] ;
+ assert.true $(v7).equal $(v7) ;
+ $(v7).insert 4 : 444 ;
+ assert.result 111 222 333 444 : $(v7).get ;
+ $(v7).insert 999 : xxx ;
+ assert.result 111 222 333 444 xxx : $(v7).get ;
+
+ local v8 = [ new vector "" "" "" ] ;
+ assert.true $(v8).equal $(v8) ;
+ assert.false $(v8).empty ;
+ assert.result 3 : $(v8).size ;
+ assert.result "" : $(v8).at 1 ;
+ assert.result "" : $(v8).at 2 ;
+ assert.result "" : $(v8).at 3 ;
+ assert.result : $(v8).at 4 ;
+ $(v8).insert 2 : 222 ;
+ assert.result 4 : $(v8).size ;
+ assert.result "" 222 "" "" : $(v8).get ;
+ $(v8).insert 999 : "" ;
+ assert.result 5 : $(v8).size ;
+ assert.result "" 222 "" "" "" : $(v8).get ;
+ $(v8).insert 999 : xxx ;
+ assert.result 6 : $(v8).size ;
+ assert.result "" 222 "" "" "" xxx : $(v8).get ;
+
+ # Regression test for a bug causing vector.equal to compare only the first
+ # and the last element in the given vectors.
+ local v9 = [ new vector 111 xxx 222 ] ;
+ local v10 = [ new vector 111 yyy 222 ] ;
+ assert.false $(v9).equal $(v10) ;
+}
diff --git a/jam-files/boost-build/util/doc.jam b/jam-files/boost-build/util/doc.jam
new file mode 100644
index 000000000..a75155882
--- /dev/null
+++ b/jam-files/boost-build/util/doc.jam
@@ -0,0 +1,997 @@
+# Copyright 2002, 2005 Dave Abrahams
+# Copyright 2002, 2003, 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Documentation system, handles --help requests.
+# It defines rules that attach documentation to modules, rules, and variables.
+# Collects and generates documentation for the various parts of the build
+# system. The documentation is collected from comments integrated into the code.
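+#
+# Documentation is picked up from the comment block that immediately precedes
+# the item it describes. For instance (module and rule names are illustrative),
+# a module "mymodule.jam" containing
+#
+#   # Returns the concatenation of A and B.
+#   #
+#   rule concat ( a : b )
+#   {
+#       return $(a)$(b) ;
+#   }
+#
+# would have that comment shown by "bjam --help mymodule.concat", with its
+# first sentence appearing in the rule list of "bjam --help mymodule".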
+
+import modules ;
+import print ;
+import set ;
+import container ;
+import "class" ;
+import sequence ;
+import path ;
+
+
+# The type of output to generate.
+# "console" is formated text echoed to the console (the default);
+# "text" is formated text appended to the output file;
+# "html" is HTML output to the file.
+#
+help-output = console ;
+
+
+# The file to output documentation to when generating "text" or "html" help.
+# This is without extension as the extension is determined by the type of
+# output.
+#
+help-output-file = help ;
+
+# Whether to include local rules in help output.
+#
+.option.show-locals ?= ;
+
+# When showing documentation for a module, whether to also generate
+# automatically the detailed docs for each item in the module.
+#
+.option.detailed ?= ;
+
+# Generate debug output as the help is generated and modules are parsed.
+#
+.option.debug ?= ;
+
+# Enable or disable a documentation option.
+#
+local rule set-option (
+ option # The option name.
+ : value ? # Enabled (non-empty), or disabled (empty)
+)
+{
+ .option.$(option) = $(value) ;
+}
+
+
+# Set the type of output.
+#
+local rule set-output ( type )
+{
+ help-output = $(type) ;
+}
+
+
+# Set the output to a file.
+#
+local rule set-output-file ( file )
+{
+ help-output-file = $(file) ;
+}
+
+
+# Extracts the brief comment from a complete comment. The brief comment is the
+# first sentence.
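+# For example, for the documentation text "Sets the value." "Checks it too.",
+# the brief comment returned is "Sets the value.".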
+#
+local rule brief-comment (
+ docs * # The comment documentation.
+)
+{
+ local d = $(docs:J=" ") ;
+ local p = [ MATCH ".*([.])$" : $(d) ] ;
+ if ! $(p) { d = $(d)"." ; }
+ d = $(d)" " ;
+ local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ;
+ local brief = $(m[1]) ;
+ while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ]
+ {
+ m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ;
+ brief += $(m[1]) ;
+ }
+ return $(brief:J="") ;
+}
+
+
+# Specifies the documentation for the current module.
+#
+local rule set-module-doc (
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).docs = $(docs) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the copyright for the current module.
+#
+local rule set-module-copyright (
+ module-name ? # The name of the module to document.
+ : copyright * # The copyright for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).copy-brief = [ brief-comment $(copyright) ] ;
+ $(module-name).copy-docs = $(copyright) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the documentation for a rule in the current module. If called in the
+# global module, this documents a global rule.
+#
+local rule set-rule-doc (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ is-local ? # Whether the rule is local to the module.
+ : docs * # The documentation for the rule.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).docs = $(docs) ;
+ $(module-name).$(name).is-local = $(is-local) ;
+
+ if ! $(name) in $($(module-name).rules)
+ {
+ $(module-name).rules += $(name) ;
+ }
+}
+
+
+# Specifies a class. This turns a previously documented rule into a class.
+#
+local rule set-class-doc (
+ name # The name of the class.
+ module-name ? # The name of the module to document.
+ : super-name ? # The super class name.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).is-class = true ;
+ $(module-name).$(name).super-name = $(super-name) ;
+ $(module-name).$(name).class-rules =
+ [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ;
+ $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ;
+
+ $(module-name).classes += $(name) ;
+ $(module-name).class-rules += $($(module-name).$(name).class-rules) ;
+ $(module-name).rules =
+ [ set.difference $($(module-name).rules) :
+ $(name) $($(module-name).$(name).class-rules) ] ;
+}
+
+
+# Set the argument call signature of a rule.
+#
+local rule set-rule-arguments-signature (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ : signature * # The arguments signature.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).signature = $(signature) ;
+}
+
+
+# Specifies the documentation for an argument of a rule.
+#
+local rule set-argument-doc (
+ name # The name of the argument.
+ qualifier # Argument syntax qualifier, "*", "+", etc.
+ rule-name # The name of the rule.
+ module-name ? # The optional name of the module.
+ : docs * # The documentation.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ;
+ $(module-name).$(rule-name).args.$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).$(rule-name).args)
+ {
+ $(module-name).$(rule-name).args += $(name) ;
+ }
+}
+
+
+# Specifies the documentation for a variable in the current module. If called in
+# the global module, the global variable is documented.
+#
+local rule set-variable-doc (
+ name # The name of the variable.
+ default # The default value.
+ initial # The initial value.
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the variable.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).default = $(default) ;
+ $(module-name).$(name).initial = $(initial) ;
+ $(module-name).$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).variables)
+ {
+ $(module-name).variables += $(name) ;
+ }
+}
+
+
+# Generates a general description of the documentation and help system.
+#
+local rule print-help-top ( )
+{
+ print.section "General command line usage" ;
+
+ print.text " bjam [options] [properties] [targets]
+
+ Options, properties and targets can be specified in any order.
+ " ;
+
+ print.section "Important Options" ;
+
+ print.list-start ;
+ print.list-item "--clean Remove targets instead of building" ;
+ print.list-item "-a Rebuild everything" ;
+ print.list-item "-n Don't execute the commands, only print them" ;
+ print.list-item "-d+2 Show commands as they are executed" ;
+ print.list-item "-d0 Supress all informational messages" ;
+ print.list-item "-q Stop at first error" ;
+ print.list-item "--debug-configuration Diagnose configuration" ;
+ print.list-item "--debug-building Report which targets are built with what properties" ;
+ print.list-item "--debug-generator Diagnose generator search/execution" ;
+ print.list-end ;
+
+ print.section "Further Help"
+ The following options can be used to obtain additional documentation.
+ ;
+
+ print.list-start ;
+ print.list-item "--help-options Print more obscure command line options." ;
+ print.list-item "--help-internal Boost.Build implementation details." ;
+ print.list-item "--help-doc-options Implementation details doc formatting." ;
+ print.list-end ;
+}
+
+
+# Generate Jam/Boost.Jam command usage information.
+#
+local rule print-help-usage ( )
+{
+ print.section "Boost.Jam Usage"
+ "bjam [ options... ] targets..."
+ ;
+ print.list-start ;
+ print.list-item -a;
+ Build all targets, even if they are current. ;
+ print.list-item -fx;
+ Read '"x"' as the Jamfile for building instead of searching for the
+ Boost.Build system. ;
+ print.list-item -jx;
+ Run up to '"x"' commands concurrently. ;
+ print.list-item -n;
+ Do not execute build commands. Instead print out the commands as they
+ would be executed if building. ;
+ print.list-item -ox;
+ Output the used build commands to file '"x"'. ;
+ print.list-item -q;
+ Quit as soon as a build failure is encountered. Without this option
+ Boost.Jam will continue building as many targets as it can. ;
+ print.list-item -sx=y;
+ Sets a Jam variable '"x"' to the value '"y"', overriding any value that
+ variable would have from the environment. ;
+ print.list-item -tx;
+ Rebuild the target '"x"', even if it is up-to-date. ;
+ print.list-item -v;
+ Display the version of bjam. ;
+ print.list-item --x;
+ Any option not explicitly handled by Boost.Jam remains available to
+ build scripts using the '"ARGV"' variable. ;
+ print.list-item -dn;
+ Enables output of diagnostic messages. The debug level '"n"' and all
+ below it are enabled by this option. ;
+ print.list-item -d+n;
+ Enables output of diagnostic messages. Only the output for debug level
+ '"n"' is enabled. ;
+ print.list-end ;
+ print.section "Debug Levels"
+ Each debug level shows a different set of information, usually with
+ higher levels producing more verbose information. The following levels
+ are supported: ;
+ print.list-start ;
+ print.list-item 0;
+ Turn off all diagnostic output. Only errors are reported. ;
+ print.list-item 1;
+ Show the actions taken for building targets, as they are executed. ;
+ print.list-item 2;
+ Show "quiet" actions and display all action text, as they are executed. ;
+ print.list-item 3;
+ Show dependency analysis, and target/source timestamps/paths. ;
+ print.list-item 4;
+ Show arguments of shell invocations. ;
+ print.list-item 5;
+ Show rule invocations and variable expansions. ;
+ print.list-item 6;
+ Show directory/header file/archive scans, and attempts at binding to targets. ;
+ print.list-item 7;
+ Show variable settings. ;
+ print.list-item 8;
+ Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ;
+ print.list-item 9;
+ Show variable manipulation, scanner tokens, and memory usage. ;
+ print.list-item 10;
+ Show execution times for rules. ;
+ print.list-item 11;
+ Show parsing progress of Jamfiles. ;
+ print.list-item 12;
+ Show graph for target dependencies. ;
+ print.list-item 13;
+ Show changes in target status (fate). ;
+ print.list-end ;
+}
+
+
+# Generates description of options controlling the help system. This
+# automatically reads the options as all variables in the doc module of the form
+# ".option.*".
+#
+local rule print-help-options (
+ module-name # The doc module.
+)
+{
+ print.section "Help Options"
+ These are all the options available for controlling the help system in
+ various ways. Options can be enabled or disabled with
+ '"--help-enable-<option>"', and "'--help-disable-<option>'"
+ respectively.
+ ;
+ local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ;
+ if $(options-to-list)
+ {
+ print.list-start ;
+ for local option in [ sequence.insertion-sort $(options-to-list) ]
+ {
+ local def = disabled ;
+ if $($(module-name)..option.$(option).default) != "(empty)"
+ {
+ def = enabled ;
+ }
+ print.list-item $(option): $($(module-name)..option.$(option).docs)
+ Default is $(def). ;
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate brief documentation for all the known items in the section for a
+# module. Possible sections are: "rules", and "variables".
+#
+local rule print-help-module-section (
+ module # The module name.
+ section # rules or variables.
+ : section-head # The title of the section.
+ section-description * # The detailed description of the section.
+)
+{
+ if $($(module).$(section))
+ {
+ print.section $(section-head) $(section-description) ;
+ print.list-start ;
+ for local item in [ sequence.insertion-sort $($(module).$(section)) ]
+ {
+ local show = ;
+ if ! $($(module).$(item).is-local)
+ {
+ show = yes ;
+ }
+ if $(.option.show-locals)
+ {
+ show = yes ;
+ }
+ if $(show)
+ {
+ print.list-item $(item): $($(module).$(item).brief) ;
+ }
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate documentation for all possible modules. We attempt to list all known
+# modules together with a brief description of each.
+#
+local rule print-help-all (
+ ignored # Usually the module name, but is ignored here.
+)
+{
+ print.section "Modules"
+ "These are all the known modules. Use --help <module> to get more"
+ "detailed information."
+ ;
+ if $(documented-modules)
+ {
+ print.list-start ;
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print.list-item $(module-name): $($(module-name).brief) ;
+ }
+ print.list-end ;
+ }
+ # The documentation for each module when details are requested.
+ if $(documented-modules) && $(.option.detailed)
+ {
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The full docs for each module.
+ print-help-module $(module-name) ;
+ }
+ }
+}
+
+
+# Generate documentation for a module. Basic information about the module is
+# generated.
+#
+local rule print-help-module (
+ module-name # The module to generate docs for.
+)
+{
+ # Print the docs.
+ print.section "Module '$(module-name)'" $($(module-name).docs) ;
+
+ # Print out the documented classes.
+ print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
+ Use --help $(module-name).<class-name> to get more information. ;
+
+ # Print out the documented rules.
+ print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out the documented variables.
+ print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
+ Use --help $(module-name).<variable-name> to get more information. ;
+
+ # Print out all the same information but in detailed form.
+ if $(.option.detailed)
+ {
+ print-help-classes $(module-name) ;
+ print-help-rules $(module-name) ;
+ print-help-variables $(module-name) ;
+ }
+}
+
+
+# Generate documentation for a set of rules in a module.
+#
+local rule print-help-rules (
+ module-name # Module of the rules.
+ : name * # Optional list of rules to describe.
+)
+{
+ name ?= $($(module-name).rules) ;
+ if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ]
+ {
+ # Print out the given rules.
+ for local rule-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local)
+ {
+ local signature = $($(module-name).$(rule-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'"
+ $($(module-name).$(rule-name).docs) ;
+ if $($(module-name).$(rule-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(rule-name).args)
+ {
+ print.list-item $(arg-name): $($(module-name).$(rule-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of classes in a module.
+#
+local rule print-help-classes (
+ module-name # Module of the classes.
+ : name * # Optional list of classes to describe.
+)
+{
+ name ?= $($(module-name).classes) ;
+ if [ set.intersection $(name) : $($(module-name).classes) ]
+ {
+ # Print out the given classes.
+ for local class-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local)
+ {
+ local signature = $($(module-name).$(class-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Class '$(module-name).$(class-name) ( $(signature) )'"
+ $($(module-name).$(class-name).docs)
+ "Inherits from '"$($(module-name).$(class-name).super-name)"'." ;
+ if $($(module-name).$(class-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(class-name).args)
+ {
+ print.list-item $(arg-name): $($(module-name).$(class-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+
+ # Print out the documented rules of the class.
+ print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out all the rules if details are requested.
+ if $(.option.detailed)
+ {
+ print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of variables in a module.
+#
+local rule print-help-variables (
+ module-name ? # Module of the variables.
+ : name * # Optional list of variables to describe.
+)
+{
+ name ?= $($(module-name).variables) ;
+ if [ set.intersection $(name) : $($(module-name).variables) ]
+ {
+ # Print out the given variables.
+ for local variable-name in [ sequence.insertion-sort $(name) ]
+ {
+ print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ;
+ if $($(module-name).$(variable-name).default) ||
+ $($(module-name).$(variable-name).initial)
+ {
+ print.list-start ;
+ if $($(module-name).$(variable-name).default)
+ {
+ print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ;
+ }
+ if $($(module-name).$(variable-name).initial)
+ {
+ print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a project.
+#
+local rule print-help-project (
+ unused ?
+ : jamfile * # The project Jamfile.
+)
+{
+ if $(jamfile<$(jamfile)>.docs)
+ {
+ # Print the docs.
+ print.section "Project-specific help"
+ Project has jamfile at $(jamfile) ;
+
+ print.lines $(jamfile<$(jamfile)>.docs) "" ;
+ }
+}
+
+
+# Generate documentation for a config file.
+#
+local rule print-help-config (
+ unused ?
+ : type # The type of configuration file user or site.
+ config-file # The configuration Jamfile.
+)
+{
+ if $(jamfile<$(config-file)>.docs)
+ {
+ # Print the docs.
+ print.section "Configuration help"
+ Configuration file at $(config-file) ;
+
+ print.lines $(jamfile<$(config-file)>.docs) "" ;
+ }
+}
+
+
+ws = " " ;
+
+# Extract the text from a block of comments.
+#
+local rule extract-comment (
+ var # The name of the variable to extract from.
+)
+{
+ local comment = ;
+ local line = $($(var)[1]) ;
+ local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ while $(l[1]) && $($(var))
+ {
+ if $(l[2]) { comment += [ MATCH "^[$(ws)]?(.*)$" : $(l[2]) ] ; }
+ else { comment += "" ; }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ }
+ return $(comment) ;
+}
+
+
+# Extract a single line of Jam syntax, ignoring any comments.
+#
+local rule extract-syntax (
+ var # The name of the variable to extract from.
+)
+{
+ local syntax = ;
+ local line = $($(var)[1]) ;
+ while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var))
+ {
+ local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ;
+ if $(m) && ! $(m) = ""
+ {
+ syntax = $(m) ;
+ }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ }
+ return $(syntax) ;
+}
+
+
+# Extract the next token; this is either a single Jam construct or a comment as
+# a single token.
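+# For example, from the line "local x = 1 ;" successive calls return "local",
+# "x", "=", "1" and ";", while a line starting with "#" is returned whole as
+# one token.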
+#
+local rule extract-token (
+ var # The name of the variable to extract from.
+)
+{
+ local parts = ;
+ while ! $(parts)
+ {
+ parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ;
+ if ! $(parts)
+ {
+ $(var) = $($(var)[2-]) ;
+ }
+ }
+ local token = ;
+ if [ MATCH "^(#)" : $(parts[1]) ]
+ {
+ token = $(parts:J=" ") ;
+ $(var) = $($(var)[2-]) ;
+ }
+ else
+ {
+ token = $(parts[1]) ;
+ $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ;
+ }
+ return $(token) ;
+}
+
+
+# Scan for a rule declaration as the next item in the variable.
+#
+local rule scan-rule (
+ syntax ? # The first part of the text which contains the rule declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ local rule-parts =
+ [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ;
+ if $(rule-parts[1])
+ {
+ # Mark as doc for rule.
+ local rule-name = $(rule-parts[2]) ;
+ if $(scope-name)
+ {
+ rule-name = $(scope-name).$(rule-name) ;
+ }
+ local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ;
+ if $(comment-block)
+ {
+ set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ;
+ }
+ # Parse args of rule.
+ $(var) = $(rule-parts[3-]) $($(var)) ;
+ set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ;
+ # Scan within this rules scope.
+ local scope-level = [ extract-token $(var) ] ;
+ local scope-name = $(rule-name) ;
+ while $(scope-level)
+ {
+ local comment-block = [ extract-comment $(var) ] ;
+ local syntax-block = [ extract-syntax $(var) ] ;
+ if [ scan-rule $(syntax-block) : $(var) ]
+ {
+ }
+ else if [ MATCH "^(\\{)" : $(syntax-block) ]
+ {
+ scope-level += "{" ;
+ }
+ else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
+ {
+ scope-level = $(scope-level[2-]) ;
+ }
+ }
+
+ return true ;
+ }
+}
+
+
+# Scan the arguments of a rule.
+#
+local rule scan-rule-arguments (
+ var # The name of the variable to extract from.
+)
+{
+ local arg-syntax = ;
+ local token = [ extract-token $(var) ] ;
+ while $(token) != "(" && $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ local arg-signature = ;
+ while $(token) != ")" && $(token) != "{"
+ {
+ local arg-name = ;
+ local arg-qualifier = " " ;
+ local arg-doc = ;
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ arg-name = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ if [ MATCH "^([\\*\\+\\?])" : $(token) ]
+ {
+ arg-qualifier = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if [ MATCH "^(#)" : $(token) ]
+ {
+ $(var) = $(token) $($(var)) ;
+ arg-doc = [ extract-comment $(var) ] ;
+ token = [ extract-token $(var) ] ;
+ }
+ set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ;
+ }
+ while $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ $(var) = "{" $($(var)) ;
+ arg-signature ?= "" ;
+ return $(arg-signature) ;
+}
+
+
+# Scan for a variable declaration.
+#
+local rule scan-variable (
+ syntax ? # The first part of the text which contains the variable declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ # [1] = name, [2] = assignment operator ("=" or "?="), [3] = value(s)
+ local var-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ;
+ if $(var-parts)
+ {
+ local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ;
+ local default-value = "" ;
+ local initial-value = "" ;
+ if $(var-parts[2]) = "?="
+ {
+ default-value = $(value) ;
+ default-value ?= "(empty)" ;
+ }
+ else
+ {
+ initial-value = $(value) ;
+ initial-value ?= "(empty)" ;
+ }
+ if $(comment-block)
+ {
+ set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ;
+ }
+ return true ;
+ }
+}
+
+
+# Scan a class declaration.
+#
+local rule scan-class (
+ syntax ? # The syntax text for the class declaration.
+)
+{
+ # [1] = class?, [2] = name, [3] = superclass
+ local class-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]+:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ;
+ if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class"
+ {
+ set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ;
+ }
+}
+
+
+# Scan a module file for documentation comments. This also invokes any actions
+# assigned to the module. The actions are the rules that do the actual output of
+# the documentation. This rule is invoked as the header scan rule for the module
+# file.
+#
+rule scan-module (
+ target # The module file.
+ : text * # The text in the file, one item per line.
+ : action * # Rule to call to output docs for the module.
+)
+{
+ if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; }
+ local module-name = $(target:B) ;
+ local module-documented = ;
+ local comment-block = ;
+ local syntax-block = ;
+ # This is a hack because we cannot get the last line of a file if it happens
+ # to not have a new-line termination.
+ text += "}" ;
+ while $(text)
+ {
+ comment-block = [ extract-comment text ] ;
+ syntax-block = [ extract-syntax text ] ;
+ if $(.option.debug)
+ {
+ ECHO "HELP:" comment block; '$(comment-block)' ;
+ ECHO "HELP:" syntax block; '$(syntax-block)' ;
+ }
+ if [ scan-rule $(syntax-block) : text ] { }
+ else if [ scan-variable $(syntax-block) : text ] { }
+ else if [ scan-class $(syntax-block) ] { }
+ else if [ MATCH .*([cC]opyright).* : $(comment-block:J=" ") ]
+ {
+ # mark as the copyright for the module.
+ set-module-copyright $(module-name) : $(comment-block) ;
+ }
+ else if $(action[1]) in "print-help-project" "print-help-config"
+ && ! $(jamfile<$(target)>.docs)
+ {
+ # special module docs for the project jamfile.
+ jamfile<$(target)>.docs = $(comment-block) ;
+ }
+ else if ! $(module-documented)
+ {
+ # document the module.
+ set-module-doc $(module-name) : $(comment-block) ;
+ module-documented = true ;
+ }
+ }
+ if $(action)
+ {
+ $(action[1]) $(module-name) : $(action[2-]) ;
+ }
+}
+
+
+# Import scan-module to global scope, so that it is available during header
+# scanning phase.
+#
+IMPORT $(__name__) : scan-module : : doc.scan-module ;
+
+
+# Read in a file using the SHELL builtin and return the individual lines as
+# would be done for header scanning.
+#
+local rule read-file (
+ file # The file to read in.
+)
+{
+ file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ;
+ if ! $(.file<$(file)>.lines)
+ {
+ local content ;
+ switch [ modules.peek : OS ]
+ {
+ case NT :
+ content = [ SHELL "TYPE \"$(file)\"" ] ;
+
+ case * :
+ content = [ SHELL "cat \"$(file)\"" ] ;
+ }
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ .file<$(file)>.lines = $(lines) ;
+ }
+ return $(.file<$(file)>.lines) ;
+}
+
+
+# Add a scan action used to generate the help documentation. The action rule is
+# passed the name of the module as the first argument. The remaining arguments
+# are optional and passed through directly as specified here.
+#
+local rule do-scan (
+ modules + # The modules to scan and perform the action on.
+ : action * # The action rule, plus the secondary arguments to pass to the action rule.
+)
+{
+ if $(help-output) = text
+ {
+ print.output $(help-output-file).txt plain ;
+ ALWAYS $(help-output-file).txt ;
+ DEPENDS all : $(help-output-file).txt ;
+ }
+ if $(help-output) = html
+ {
+ print.output $(help-output-file).html html ;
+ ALWAYS $(help-output-file).html ;
+ DEPENDS all : $(help-output-file).html ;
+ }
+ for local module-file in $(modules[1--2])
+ {
+ scan-module $(module-file) : [ read-file $(module-file) ] ;
+ }
+ scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ;
+}
diff --git a/jam-files/boost-build/util/indirect.jam b/jam-files/boost-build/util/indirect.jam
new file mode 100644
index 000000000..ec63f1920
--- /dev/null
+++ b/jam-files/boost-build/util/indirect.jam
@@ -0,0 +1,115 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import numbers ;
+
+
+# The pattern that indirect rules must match: module%rule
+.pattern = ^([^%]*)%([^%]+)$ ;
+
+
+#
+# Type checking rules.
+#
+local rule indirect-rule ( x )
+{
+ if ! [ MATCH $(.pattern) : $(x) ]
+ {
+ return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+# Make an indirect rule which calls the given rule. If context is supplied it is
+# expected to be the module in which to invoke the rule by the 'call' rule
+# below. Otherwise, the rule will be invoked in the module of this rule's
+# caller.
+#
+rule make ( rulename bound-args * : context ? )
+{
+ context ?= [ CALLER_MODULE ] ;
+ context ?= "" ;
+ return $(context)%$(rulename) $(bound-args) ;
+}
+
+
+# Make an indirect rule which calls the given rule. 'rulename' may be a
+# qualified rule; if so it is returned unchanged. Otherwise, if frames is not
+# supplied, the result will be invoked (by 'call', below) in the module of the
+# caller. Otherwise, frames > 1 specifies additional call frames to back up in
+# order to find the module context.
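+# For example (illustrative), [ make-qualified regex.split ] returns
+# "regex%regex.split", while [ make-qualified already%qualified ] is returned
+# unchanged.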
+#
+rule make-qualified ( rulename bound-args * : frames ? )
+{
+ if [ MATCH $(.pattern) : $(rulename) ]
+ {
+ return $(rulename) $(bound-args) ;
+ }
+ else
+ {
+ frames ?= 1 ;
+ # If the rule name includes a Jamfile module, grab it.
+ local module-context = [ MATCH ^(Jamfile<[^>]*>)\\..* : $(rulename) ] ;
+
+ if ! $(module-context)
+ {
+ # Take the first dot-separated element as module name. This disallows
+ # module names with dots, but allows rule names with dots.
+ module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ;
+ }
+ module-context ?= [ CALLER_MODULE $(frames) ] ;
+ return [ make $(rulename) $(bound-args) : $(module-context) ] ;
+ }
+}
+
+
+# Returns the module name in which the given indirect rule will be invoked.
+#
+rule get-module ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ if ! $(m[1])
+ {
+ m = ;
+ }
+ return $(m[1]) ;
+}
+
+
+# Returns the rulename that will be called when x is invoked.
+#
+rule get-rule ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ return $(m[2]) ;
+}
+
+
+# Invoke the given indirect-rule.
+#
+rule call ( [indirect-rule] r args * : * )
+{
+ return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+}
+
+
+rule __test__
+{
+ import assert ;
+
+ rule foo-barr! ( x )
+ {
+ assert.equal $(x) : x ;
+ }
+
+ assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
+ assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
+
+ call [ make foo-barr! ] x ;
+ call [ make foo-barr! x ] ;
+ call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
+}
diff --git a/jam-files/boost-build/util/numbers.jam b/jam-files/boost-build/util/numbers.jam
new file mode 100644
index 000000000..665347d31
--- /dev/null
+++ b/jam-files/boost-build/util/numbers.jam
@@ -0,0 +1,218 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+
+
+rule trim-leading-zeroes ( value )
+{
+ return [ CALC $(value) + 0 ] ;
+}
+
+
+rule check ( numbers * )
+{
+ for local n in $(numbers)
+ {
+ switch $(n)
+ {
+ case *[^0-9]* :
+ errors.error $(n) "in" $(numbers) : is not a number ;
+ }
+ }
+}
+
+
+rule increment ( number )
+{
+ return [ CALC $(number) + 1 ] ;
+}
+
+
+rule decrement ( number )
+{
+ return [ CALC $(number) - 1 ] ;
+}
+
+
+rule range ( start finish ? : step ? )
+{
+ if ! $(finish)
+ {
+ finish = $(start) ;
+ start = 1 ;
+ }
+ step ?= 1 ;
+
+ check $(start) $(finish) $(step) ;
+
+ if $(finish) != 0
+ {
+ local result ;
+ while [ less $(start) $(finish) ] || $(start) = $(finish)
+ {
+ result += $(start) ;
+ start = [ CALC $(start) + $(step) ] ;
+ }
+ return $(result) ;
+ }
+}
+
+
+rule less ( n1 n2 )
+{
+ switch [ CALC $(n2) - $(n1) ]
+ {
+ case [1-9]* : return true ;
+ }
+}
+
+
+rule log10 ( number )
+{
+ switch $(number)
+ {
+ case *[^0-9]* : errors.error $(number) is not a number ;
+ case 0 : errors.error can't take log of zero ;
+ case [1-9] : return 0 ;
+ case [1-9]? : return 1 ;
+ case [1-9]?? : return 2 ;
+ case [1-9]??? : return 3 ;
+ case [1-9]???? : return 4 ;
+ case [1-9]????? : return 5 ;
+ case [1-9]?????? : return 6 ;
+ case [1-9]??????? : return 7 ;
+ case [1-9]???????? : return 8 ;
+ case [1-9]????????? : return 9 ;
+ case * :
+ {
+ import sequence ;
+ import string ;
+ local chars = [ string.chars $(number) ] ;
+ while $(chars[1]) = 0
+ {
+ chars = $(chars[2-]) ;
+ }
+ if ! $(chars)
+ {
+ errors.error can't take log of zero ;
+ }
+ else
+ {
+ return [ decrement [ sequence.length $(chars) ] ] ;
+ }
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 1 : increment 0 ;
+ assert.result 2 : increment 1 ;
+ assert.result 1 : decrement 2 ;
+ assert.result 0 : decrement 1 ;
+ assert.result 50 : increment 49 ;
+ assert.result 49 : decrement 50 ;
+ assert.result 99 : increment 98 ;
+ assert.result 99 : decrement 100 ;
+ assert.result 100 : increment 99 ;
+ assert.result 999 : decrement 1000 ;
+ assert.result 1000 : increment 999 ;
+
+ assert.result 1 2 3 : range 3 ;
+ assert.result 1 2 3 4 5 6 7 8 9 10 11 12 : range 12 ;
+ assert.result 3 4 5 6 7 8 9 10 11 : range 3 11 ;
+ assert.result : range 0 ;
+ assert.result 1 4 7 10 : range 10 : 3 ;
+ assert.result 2 4 6 8 10 : range 2 10 : 2 ;
+ assert.result 25 50 75 100 : range 25 100 : 25 ;
+
+ assert.result 0 : trim-leading-zeroes 0 ;
+ assert.result 1234 : trim-leading-zeroes 1234 ;
+ assert.result 123456 : trim-leading-zeroes 0000123456 ;
+ assert.result 1000123456 : trim-leading-zeroes 1000123456 ;
+ assert.result 10000 : trim-leading-zeroes 10000 ;
+ assert.result 10000 : trim-leading-zeroes 00010000 ;
+
+ assert.true less 1 2 ;
+ assert.true less 1 12 ;
+ assert.true less 1 21 ;
+ assert.true less 005 217 ;
+ assert.false less 0 0 ;
+ assert.false less 03 3 ;
+ assert.false less 3 03 ;
+ assert.true less 005 217 ;
+ assert.true less 0005 217 ;
+ assert.true less 5 00217 ;
+
+ # TEMPORARILY disabled, because nested "try"/"catch" do not work and I do not
+ # have the time to fix that right now.
+ if $(0)
+ {
+ try ;
+ {
+ decrement 0 ;
+ }
+ catch can't decrement zero! ;
+
+ try ;
+ {
+ check foo ;
+ }
+ catch : not a number ;
+
+ try ;
+ {
+ increment foo ;
+ }
+ catch : not a number ;
+
+ try ;
+ {
+ log10 0 ;
+ }
+ catch can't take log of zero ;
+
+ try ;
+ {
+ log10 000 ;
+ }
+ catch can't take log of zero ;
+
+ }
+
+ assert.result 0 : log10 1 ;
+ assert.result 0 : log10 9 ;
+ assert.result 1 : log10 10 ;
+ assert.result 1 : log10 99 ;
+ assert.result 2 : log10 100 ;
+ assert.result 2 : log10 101 ;
+ assert.result 2 : log10 125 ;
+ assert.result 2 : log10 999 ;
+ assert.result 3 : log10 1000 ;
+ assert.result 10 : log10 12345678901 ;
+
+ for local x in [ range 75 110 : 5 ]
+ {
+ for local y in [ range $(x) 111 : 3 ]
+ {
+ if $(x) != $(y)
+ {
+ assert.true less $(x) $(y) ;
+ }
+ }
+ }
+
+ for local x in [ range 90 110 : 2 ]
+ {
+ for local y in [ range 80 $(x) : 4 ]
+ {
+ assert.false less $(x) $(y) ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/util/option.jam b/jam-files/boost-build/util/option.jam
new file mode 100644
index 000000000..f6dc37522
--- /dev/null
+++ b/jam-files/boost-build/util/option.jam
@@ -0,0 +1,109 @@
+# Copyright (c) 2005 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+
+# Set a value for a named option, to be used when not overridden on the command
+# line.
+rule set ( name : value ? )
+{
+ .option.$(name) = $(value) ;
+}
+
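+# Get the value of a named option. A "--<name>=<value>" command line argument
+# takes priority; a bare "--<name>" yields implied-value (when given);
+# otherwise the value stored via 'set' is used, falling back to default-value.
+# For example (the option name is illustrative):
+#
+#   option.get "layout" : system ;
+#
+# returns "tagged" if "--layout=tagged" was passed to bjam, and "system" if the
+# option is absent and nothing was stored via 'set'.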
+rule get ( name : default-value ? : implied-value ? )
+{
+ local m = [ MATCH --$(name)=(.*) : [ modules.peek : ARGV ] ] ;
+ if $(m)
+ {
+ return $(m[1]) ;
+ }
+ else
+ {
+ m = [ MATCH (--$(name)) : [ modules.peek : ARGV ] ] ;
+ if $(m) && $(implied-value)
+ {
+ return $(implied-value) ;
+ }
+ else if $(.option.$(name))
+ {
+ return $(.option.$(name)) ;
+ }
+ else
+ {
+ return $(default-value) ;
+ }
+ }
+}
+
+
+# Check command-line args as soon as possible. For each option, try to load a
+# module named after the option. If that succeeds, invoke the 'process' rule in
+# that module. The rule may return "true" to indicate that the regular build
+# process should not be attempted.
+#
+# Options take the general form of: --<name>[=<value>] [<value>]
+#
+rule process ( )
+{
+ local ARGV = [ modules.peek : ARGV ] ;
+ local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ;
+
+ local dont-build ;
+ local args = $(ARGV) ;
+ while $(args)
+ {
+ local arg = [ MATCH ^--(.*) : $(args[1]) ] ;
+ while $(args[2-]) && ! $(arg)
+ {
+ args = $(args[2-]) ;
+ arg = [ MATCH ^--(.*) : $(args[1]) ] ;
+ }
+ args = $(args[2-]) ;
+
+ if $(arg)
+ {
+ local split = [ MATCH ^(([^-=]+)[^=]*)(=?)(.*)$ : $(arg) ] ;
+ local full-name = $(split[1]) ;
+ local prefix = $(split[2]) ;
+ local values ;
+
+ if $(split[3])
+ {
+ values = $(split[4]) ;
+ }
+ if $(args) && ! [ MATCH ^(--).* : $(args[1]) ]
+ {
+ values += $(args[1]) ;
+ args = $(args[2-]) ;
+ }
+
+ # Look in the options subdirectories of BOOST_BUILD_PATH for modules
+ # matching the full option name and then its prefix.
+ local plugin-dir = options ;
+ local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) :
+ $(full-name).jam $(prefix).jam ] ;
+
+ if $(option-files)
+ {
+ # Load the file into a module named for the option.
+ local f = $(option-files[1]) ;
+ local module-name = --$(f:D=:S=) ;
+ modules.load $(module-name) : $(f:D=) : $(f:D) ;
+
+ # If there is a process rule, call it with the full option name
+ # and its value (if any). If there was no "=" in the option, the
+ # value will be empty.
+ if process in [ RULENAMES $(module-name) ]
+ {
+ dont-build += [ modules.call-in $(module-name) : process
+ --$(full-name) : $(values) ] ;
+ }
+ }
+ }
+ }
+
+ return $(dont-build) ;
+}
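+
+# Sketch of a matching plugin module (file and option names are hypothetical):
+# a file options/frobnicate.jam under a BOOST_BUILD_PATH directory containing
+#
+#   rule process ( option : values * )
+#   {
+#       ECHO "seen" $(option) "with values" $(values) ;
+#       return true ;  # ask Boost.Build to skip the regular build
+#   }
+#
+# would be loaded when "bjam --frobnicate=yes" is run, and its 'process' rule
+# would be invoked as [ process --frobnicate : yes ].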
diff --git a/jam-files/boost-build/util/order.jam b/jam-files/boost-build/util/order.jam
new file mode 100644
index 000000000..a74fc8c84
--- /dev/null
+++ b/jam-files/boost-build/util/order.jam
@@ -0,0 +1,169 @@
+# Copyright (C) 2003 Vladimir Prus
+# Use, modification, and distribution is subject to the Boost Software
+# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
+# at http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines a class which allows ordering arbitrary objects with
+# regard to an arbitrary binary relation.
+#
+# The primary use case is the gcc toolset, which is sensitive to library order:
+# if library 'a' uses symbols from library 'b', then 'a' must be present before
+# 'b' on the linker's command line.
+#
+# This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris
+# LD (and for Solaris toolset as well), the order always matters.
+#
+# So, we need to store order requirements and then order libraries according to
+# them. It is not possible to use the dependency graph as order requirements.
+# What we need is a "use symbols" relationship, while the dependency graph provides
+# the "needs to be updated" relationship.
+#
+# For example:
+#   lib a : a.cpp b ;
+#   lib b ;
+#
+# For static linking, library 'a' need not depend on 'b'. However, it should
+# still come before 'b' on the command line.
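+#
+# A minimal usage sketch (mirroring the __test__ rule at the bottom of this
+# file; requires 'import "class" : new ;'):
+#
+#   local o = [ new order ] ;
+#   $(o).add-pair a b ;
+#   local sorted = [ $(o).order b a ] ;   # sorted = a b, honouring the constraint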
+
+class order
+{
+ rule __init__ ( )
+ {
+ }
+
+ # Adds the constraint that 'first' should precede 'second'.
+ rule add-pair ( first second )
+ {
+ .constraints += $(first)--$(second) ;
+ }
+ NATIVE_RULE class@order : add-pair ;
+
+ # Given a list of objects, reorder them so that the constraints specified by
+ # 'add-pair' are satisfied.
+ #
+ # The algorithm was adapted from an awk script by Nikita Youshchenko
+ # (yoush at cs dot msu dot su)
+ rule order ( objects * )
+ {
+ # The algorithm used is the same as the standard transitive closure, except
+ # that we're not keeping in-degree for all vertices, but rather removing
+ # edges.
+ local result ;
+ if $(objects)
+ {
+ local constraints = [ eliminate-unused-constraints $(objects) ] ;
+
+ # Find some library that nobody depends upon and add it to the
+ # 'result' array.
+ local obj ;
+ while $(objects)
+ {
+ local new_objects ;
+ while $(objects)
+ {
+ obj = $(objects[1]) ;
+ if [ has-no-dependents $(obj) : $(constraints) ]
+ {
+ # Emulate break ;
+ new_objects += $(objects[2-]) ;
+ objects = ;
+ }
+ else
+ {
+ new_objects += $(obj) ;
+ obj = ;
+ objects = $(objects[2-]) ;
+ }
+ }
+
+ if ! $(obj)
+ {
+ errors.error "Circular order dependencies" ;
+ }
+ # No problem with placing first.
+ result += $(obj) ;
+ # Remove all constraints where 'obj' comes first, since they are
+ # already satisfied.
+ constraints = [ remove-satisfied $(constraints) : $(obj) ] ;
+
+ # Add the remaining objects for further processing on the next
+ # iteration
+ objects = $(new_objects) ;
+ }
+
+ }
+ return $(result) ;
+ }
+ NATIVE_RULE class@order : order ;
+
+ # Eliminate constraints which mention objects not in 'objects'. In
+ # graph-theory terms, this is finding a subgraph induced by ordered
+ # vertices.
+ rule eliminate-unused-constraints ( objects * )
+ {
+ local result ;
+ for local c in $(.constraints)
+ {
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[1]) in $(objects) && $(m[2]) in $(objects)
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+ }
+
+ # Returns true if there's no constraint in 'constraints' where 'obj' comes
+ # second.
+ rule has-no-dependents ( obj : constraints * )
+ {
+ local failed ;
+ while $(constraints) && ! $(failed)
+ {
+ local c = $(constraints[1]) ;
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[2]) = $(obj)
+ {
+ failed = true ;
+ }
+ constraints = $(constraints[2-]) ;
+ }
+ if ! $(failed)
+ {
+ return true ;
+ }
+ }
+
+ rule remove-satisfied ( constraints * : obj )
+ {
+ local result ;
+ for local c in $(constraints)
+ {
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[1]) != $(obj)
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import "class" : new ;
+ import assert ;
+
+ c1 = [ new order ] ;
+ $(c1).add-pair l1 l2 ;
+
+ assert.result l1 l2 : $(c1).order l1 l2 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+
+ $(c1).add-pair l2 l3 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+ $(c1).add-pair x l2 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+ assert.result l1 l2 l3 : $(c1).order l2 l3 l1 ;
+}
diff --git a/jam-files/boost-build/util/os.jam b/jam-files/boost-build/util/os.jam
new file mode 100644
index 000000000..daef27f77
--- /dev/null
+++ b/jam-files/boost-build/util/os.jam
@@ -0,0 +1,171 @@
+# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import string ;
+
+
+# Return the value(s) of the given environment variable(s) at the time bjam was
+# invoked.
+rule environ ( variable-names + )
+{
+ return [ modules.peek .ENVIRON : $(variable-names) ] ;
+}
+
+.name = [ modules.peek : OS ] ;
+.platform = [ modules.peek : OSPLAT ] ;
+.version = [ modules.peek : OSVER ] ;
+
+
+local rule constant ( c : os ? )
+{
+ os ?= $(.name) ;
+ # First look for a platform-specific name, then the general value.
+ local variables = .$(c)-$(os) .$(c) ;
+ local result = $($(variables)) ;
+ return $(result[1]) ;
+}
+
+rule get-constant ( os ? )
+{
+ # Find the name of the constant being accessed, which is equal to the name
+ # used to invoke us.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = [ MATCH ([^.]*)$ : $(bt[4]) ] ;
+ return [ constant $(rulename) : $(os) ] ;
+}
+
+
+# export all the common constants
+.constants = name platform version shared-library-path-variable path-separator executable-path-variable executable-suffix ;
+for local constant in $(.constants)
+{
+ IMPORT $(__name__) : get-constant : $(__name__) : $(constant) ;
+}
+EXPORT $(__name__) : $(.constants) ;
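+
+# Illustrative use of the exported constant rules from another module (actual
+# values depend on the constants defined below):
+#
+#   import os ;
+#   [ os.name ]                          # e.g. "NT" or "LINUX"
+#   [ os.path-separator ]                # ";" on NT, ":" by default
+#   [ os.shared-library-path-variable ]  # "PATH" on NT, "LD_LIBRARY_PATH" by default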
+
+.executable-path-variable-NT = PATH ;
+# On Windows the case and capitalization of PATH is not always predictable, so
+# let's find out what variable name was really set.
+if $(.name) = NT
+{
+ for local n in [ VARNAMES .ENVIRON ]
+ {
+ if $(n:L) = path
+ {
+ .executable-path-variable-NT = $(n) ;
+ }
+ }
+}
+
+# Specific constants for various platforms. There's no need to define any
+# constant whose value would be the same as the default, below.
+.shared-library-path-variable-NT = $(.executable-path-variable-NT) ;
+.path-separator-NT = ";" ;
+.expand-variable-prefix-NT = % ;
+.expand-variable-suffix-NT = % ;
+.executable-suffix-NT = .exe ;
+
+.shared-library-path-variable-CYGWIN = PATH ;
+
+.shared-library-path-variable-MACOSX = DYLD_LIBRARY_PATH ;
+
+.shared-library-path-variable-AIX = LIBPATH ;
+
+# Default constants
+.shared-library-path-variable = LD_LIBRARY_PATH ;
+.path-separator = ":" ;
+.expand-variable-prefix = $ ;
+.expand-variable-suffix = "" ;
+.executable-path-variable = PATH ;
+.executable-suffix = "" ;
+
+
+# Return a list of the directories in the PATH. Yes, that information is (sort
+# of) available in the global module, but jam code can change those values, and
+# it isn't always clear what case/capitalization to use when looking. This rule
+# is a more reliable way to get there.
+rule executable-path ( )
+{
+ return [ string.words [ environ [ constant executable-path-variable ] ]
+ : [ constant path-separator ] ] ;
+}
+
+
+# Initialize the list of home directories for the current user depending on the
+# OS.
+if $(.name) = NT
+{
+ local home = [ environ HOMEDRIVE HOMEPATH ] ;
+ .home-directories = $(home[1])$(home[2]) [ environ HOME ] [ environ USERPROFILE ] ;
+}
+else
+{
+ .home-directories = [ environ HOME ] ;
+}
+
+
+# Can't use 'constant' mechanism because it only returns 1-element values.
+rule home-directories ( )
+{
+ return $(.home-directories) ;
+}
+
+
+# Return the string needed to represent the expansion of the named shell
+# variable.
+rule expand-variable ( variable )
+{
+ local prefix = [ constant expand-variable-prefix ] ;
+ local suffix = [ constant expand-variable-suffix ] ;
+ return $(prefix)$(variable)$(suffix) ;
+}
+
+
+# Returns true if running on Windows, whether in Cygwin or not.
+rule on-windows ( )
+{
+ local result ;
+ if [ modules.peek : NT ]
+ {
+ result = true ;
+ }
+ else if [ modules.peek : UNIX ]
+ {
+ switch [ modules.peek : JAMUNAME ]
+ {
+ case CYGWIN* :
+ {
+ result = true ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+if ! [ on-windows ]
+{
+ .on-unix = 1 ;
+}
+
+
+rule on-unix
+{
+ return $(.on-unix) ;
+}
+
+
+rule __test__
+{
+ import assert ;
+ if ! ( --quiet in [ modules.peek : ARGV ] )
+ {
+ ECHO os: name= [ name ] ;
+ ECHO os: version= [ version ] ;
+ }
+ assert.true name ;
+}
diff --git a/jam-files/boost-build/util/path.jam b/jam-files/boost-build/util/path.jam
new file mode 100644
index 000000000..ea26b816b
--- /dev/null
+++ b/jam-files/boost-build/util/path.jam
@@ -0,0 +1,934 @@
+# Copyright Vladimir Prus 2002-2006.
+# Copyright Dave Abrahams 2003-2004.
+# Copyright Rene Rivera 2003-2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Performs various path manipulations. Paths are always in a 'normalized'
+# representation. In it, a path may be either:
+#
+# - '.', or
+#
+# - ['/'] [ ( '..' '/' )* (token '/')* token ]
+#
+# In plain English, a path can be rooted, '..' elements are allowed only at
+# the beginning, and it never ends in a slash, except for the path consisting
+# of a slash only.
+
+import errors ;
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+import version ;
+
+
+os = [ modules.peek : OS ] ;
+if [ modules.peek : UNIX ]
+{
+ local uname = [ modules.peek : JAMUNAME ] ;
+ switch $(uname)
+ {
+ case CYGWIN* : os = CYGWIN ;
+ case * : os = UNIX ;
+ }
+}
+
+
+# Converts the native path into normalized form.
+#
+rule make ( native )
+{
+ return [ make-$(os) $(native) ] ;
+}
+
+
+# Builds native representation of the path.
+#
+rule native ( path )
+{
+ return [ native-$(os) $(path) ] ;
+}
+
+
+# Tests if a path is rooted.
+#
+rule is-rooted ( path )
+{
+ return [ MATCH "^(/)" : $(path) ] ;
+}
+
+
+# Tests if a path has a parent.
+#
+rule has-parent ( path )
+{
+ if $(path) != /
+ {
+ return 1 ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Returns the path without any directory components.
+#
+rule basename ( path )
+{
+ return [ MATCH "([^/]+)$" : $(path) ] ;
+}
+
+
+# Returns the parent directory of the path. If no parent exists, an error is
+# issued.
+#
+rule parent ( path )
+{
+ if [ has-parent $(path) ]
+ {
+ if $(path) = .
+ {
+ return .. ;
+ }
+ else
+ {
+ # Strip everything at the end of path up to and including the last
+ # slash.
+ local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
+
+ # Did we strip what we shouldn't?
+ if $(result[2]) = ".."
+ {
+ return $(path)/.. ;
+ }
+ else
+ {
+ if ! $(result[1])
+ {
+ if [ is-rooted $(path) ]
+ {
+ result = / ;
+ }
+ else
+ {
+ result = . ;
+ }
+ }
+ return $(result[1]) ;
+ }
+ }
+ }
+ else
+ {
+ errors.error "Path '$(path)' has no parent" ;
+ }
+}
+
+
+# Returns path2 such that "[ join path path2 ] = .". The path may not contain
+# a '..' element or be rooted.
+#
+rule reverse ( path )
+{
+ if $(path) = .
+ {
+ return $(path) ;
+ }
+ else
+ {
+ local tokens = [ regex.split $(path) "/" ] ;
+ local tokens2 ;
+ for local i in $(tokens)
+ {
+ tokens2 += .. ;
+ }
+ return [ sequence.join $(tokens2) : "/" ] ;
+ }
+}
+
+
+# Concatenates the passed path elements. Generates an error if any element other
+# than the first one is rooted. Skips any empty or undefined path elements.
+#
+rule join ( elements + )
+{
+ if ! $(elements[2-])
+ {
+ return $(elements[1]) ;
+ }
+ else
+ {
+ for local e in $(elements[2-])
+ {
+ if [ is-rooted $(e) ]
+ {
+ errors.error only the first element may be rooted ;
+ }
+ }
+ if [ version.check-jam-version 3 1 17 ]
+ {
+ return [ NORMALIZE_PATH "$(elements)" ] ;
+ }
+ else
+ {
+ # Boost Jam prior to version 3.1.17 had problems with its
+ # NORMALIZE_PATH rule in case you passed it a leading backslash
+ # instead of a slash, in some cases when you sent it an empty
+ # initial path element and possibly some others. At least some of
+ # those cases were being hit and relied upon when calling this rule
+ # from the path.make-NT rule.
+ if ! $(elements[1]) && $(elements[2])
+ {
+ return [ NORMALIZE_PATH "/" "$(elements[2-])" ] ;
+ }
+ else
+ {
+ return [ NORMALIZE_PATH "$(elements)" ] ;
+ }
+ }
+ }
+}
+
+
+# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged.
+#
+rule root ( path root )
+{
+ if [ is-rooted $(path) ]
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ join $(root) $(path) ] ;
+ }
+}
+
+
+# Returns the current working directory.
+#
+rule pwd ( )
+{
+ if ! $(.pwd)
+ {
+ .pwd = [ make [ PWD ] ] ;
+ }
+ return $(.pwd) ;
+}
+
+
+# Returns the list of files matching the given pattern in the specified
+# directory. Both directories and patterns are supplied as portable paths. Each
+# pattern should be a non-absolute path and can't contain "." or ".." elements.
+# Each slash-separated element of a pattern can contain the following special
+# characters:
+# - '?', which matches any single character
+# - '*', which matches an arbitrary number of characters.
+# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and
+# only if e1 matches p1, e2 matches p2 and so on.
+#
+# For example:
+# [ glob . : *.cpp ]
+# [ glob . : */build/Jamfile ]
+#
+rule glob ( dirs * : patterns + : exclude-patterns * )
+{
+ local result ;
+ local real-patterns ;
+ local real-exclude-patterns ;
+ for local d in $(dirs)
+ {
+ for local p in $(patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-patterns += [ path.native $(pattern) ] ;
+ }
+
+ for local p in $(exclude-patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-exclude-patterns += [ path.native $(pattern) ] ;
+ }
+ }
+
+ local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
+ inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
+ local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
+ exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
+
+ return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ]
+ ;
+}
+
+
+# Recursive version of GLOB. Builds the glob of files while also searching in
+# the subdirectories of the given roots. An optional set of exclusion patterns
+# will filter out the matching entries from the result. The exclusions also
+# apply to the subdirectory scanning, such that directories that match the
+# exclusion patterns will not be searched.
+#
+rule glob-tree ( roots * : patterns + : exclude-patterns * )
+{
+ return [ sequence.transform path.make : [ .glob-tree [ sequence.transform
+ path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ;
+}
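+
+# For example (illustrative patterns):
+#   [ path.glob-tree . : *.cpp *.h ]        # all sources under the tree
+#   [ path.glob-tree src : Jamfile : .svn ] # skip .svn directories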
+
+
+local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
+{
+ local excluded ;
+ if $(exclude-patterns)
+ {
+ excluded = [ GLOB $(roots) : $(exclude-patterns) ] ;
+ }
+ local result = [ set.difference [ GLOB $(roots) : $(patterns) ] :
+ $(excluded) ] ;
+ local subdirs ;
+ for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ]
+ {
+ if ! ( $(d:D=) in . .. ) && ! [ CHECK_IF_FILE $(d) ]
+ {
+ subdirs += $(d) ;
+ }
+ }
+ if $(subdirs)
+ {
+ result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ]
+ ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if the specified file exists.
+#
+rule exists ( file )
+{
+ return [ path.glob $(file:D) : $(file:D=) ] ;
+}
+NATIVE_RULE path : exists ;
+
+
+# Finds the absolute name of the path and returns the list of all its parents,
+# starting with the immediate one. Parents are returned as relative names. If
+# 'upper_limit' is specified, directories above it will be pruned.
+#
+rule all-parents ( path : upper_limit ? : cwd ? )
+{
+ cwd ?= [ pwd ] ;
+ local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ;
+
+ if ! $(upper_limit)
+ {
+ upper_limit = / ;
+ }
+ local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ;
+
+ # Leave only elements in 'path_ele' below 'upper_ele'.
+ while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) )
+ {
+ upper_ele = $(upper_ele[2-]) ;
+ path_ele = $(path_ele[2-]) ;
+ }
+
+ # Have all upper elements been removed ?
+ if $(upper_ele)
+ {
+ errors.error "$(upper_limit) is not prefix of $(path)" ;
+ }
+
+ # Create the relative paths to parents, number of elements in 'path_ele'.
+ local result ;
+ for local i in $(path_ele)
+ {
+ path = [ parent $(path) ] ;
+ result += $(path) ;
+ }
+ return $(result) ;
+}
+
+
+# Searches for 'patterns' in the parent directories of 'dir', up to and
+# including 'upper_limit' if it is specified, or up to the filesystem root
+# otherwise.
+#
+rule glob-in-parents ( dir : patterns + : upper-limit ? )
+{
+ local result ;
+ local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
+
+ while $(parent-dirs) && ! $(result)
+ {
+ result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
+ parent-dirs = $(parent-dirs[2-]) ;
+ }
+ return $(result) ;
+}
+
+
+# Assuming 'child' is a subdirectory of 'parent', return the relative path from
+# 'parent' to 'child'.
+#
+rule relative ( child parent : no-error ? )
+{
+ local not-a-child ;
+ if $(parent) = "."
+ {
+ return $(child) ;
+ }
+ else
+ {
+ local split1 = [ regex.split $(parent) / ] ;
+ local split2 = [ regex.split $(child) / ] ;
+
+ while $(split1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ not-a-child = true ;
+ split1 = ;
+ }
+ }
+ if $(split2)
+ {
+ if $(not-a-child)
+ {
+ if $(no-error)
+ {
+ return not-a-child ;
+ }
+ else
+ {
+ errors.error $(child) is not a subdir of $(parent) ;
+ }
+ }
+ else
+ {
+ return [ join $(split2) ] ;
+ }
+ }
+ else
+ {
+ return "." ;
+ }
+ }
+}
+
+
+# Returns the minimal path to path2 that is relative to path1.
+#
+rule relative-to ( path1 path2 )
+{
+ local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
+ local split1 = [ regex.split $(path1) / ] ;
+ local split2 = [ regex.split $(path2) / ] ;
+
+ while $(split1) && $(root_1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ root_1 = $(root_1[2-]) ;
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ split1 = ;
+ }
+ }
+ return [ join . $(root_1) $(split2) ] ;
+}
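+
+# For example:
+#   [ path.relative-to a/b/c a/d ]   # returns "../../d"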
+
+
+# Returns the list of paths which are used by the operating system for looking
+# up programs.
+#
+rule programs-path ( )
+{
+ local result ;
+ local raw = [ modules.peek : PATH Path path ] ;
+ for local p in $(raw)
+ {
+ if $(p)
+ {
+ result += [ path.make $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+rule makedirs ( path )
+{
+ local result = true ;
+ local native = [ native $(path) ] ;
+ if ! [ exists $(native) ]
+ {
+ if [ makedirs [ parent $(path) ] ]
+ {
+ if ! [ MAKEDIR $(native) ]
+ {
+ errors.error "Could not create directory '$(path)'" ;
+ result = ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+# Converts native Windows paths into our internal canonical path representation.
+# Supports 'invalid' paths containing multiple successive path separator
+# characters.
+#
+# TODO: Check and if needed add support for Windows 'X:file' path format where
+# the file is located in the current folder on drive X.
+#
+rule make-NT ( native )
+{
+ local result ;
+
+ if [ version.check-jam-version 3 1 17 ]
+ {
+ result = [ NORMALIZE_PATH $(native) ] ;
+ }
+ else
+ {
+ # This old implementation is really fragile due to the unclear way the
+ # NORMALIZE_PATH rule worked in Boost.Jam versions prior to 3.1.17. E.g.
+ # path.join would mostly ignore empty path elements but would root the
+ # joined path in case the initial two path elements were empty, or some
+ # similar accidental weirdness.
+ result = [ path.join [ regex.split $(native) "[/\\]" ] ] ;
+ }
+
+ # We need to add an extra '/' in front in case this is a rooted Windows path
+ # starting with a drive letter and not a path separator character since the
+ # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter
+ # and treats it as a regular folder name.
+ if [ regex.match "(^.:)" : $(native) ]
+ {
+ result = /$(result) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule native-NT ( path )
+{
+ local result ;
+ if [ is-rooted $(path) ] && ! [ regex.match "^/(.:)" : $(path) ]
+ {
+ result = $(path) ;
+ }
+ else
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ;
+ }
+ result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
+ return $(result) ;
+}
+
+
+rule make-UNIX ( native )
+{
+ # VP: I have no idea how 'native' can be empty here! But it can!
+ if ! $(native)
+ {
+ errors.error "Empty path passed to 'make-UNIX'" ;
+ }
+ else
+ {
+ return [ NORMALIZE_PATH $(native:T) ] ;
+ }
+}
+
+
+rule native-UNIX ( path )
+{
+ return $(path) ;
+}
+
+
+rule make-CYGWIN ( path )
+{
+ return [ make-NT $(path) ] ;
+}
+
+
+rule native-CYGWIN ( path )
+{
+ local result = $(path) ;
+ if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path.
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'.
+ }
+ return [ native-UNIX $(result) ] ;
+}
+
+
+# split-path-VMS: splits input native path into device dir file (each part is
+# optional).
+#
+# example:
+#
+# dev:[dir]file.c => dev: [dir] file.c
+#
+rule split-path-VMS ( native )
+{
+ local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
+ local device = $(matches[1]) ;
+ local dir = $(matches[2]) ;
+ local file = $(matches[3]) ;
+
+ return $(device) $(dir) $(file) ;
+}
+
+
+# Converts a native VMS path into a portable path spec.
+#
+# Does not handle current-device absolute paths such as "[dir]File.c" as it is
+# not clear how to represent them in the portable path notation.
+#
+# Adds a trailing dot (".") to the file part if no extension is present (helps
+# when converting it back into native path).
+#
+rule make-VMS ( native )
+{
+ if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
+ {
+ errors.error "Can't handle default-device absolute paths: " $(native) ;
+ }
+
+ local parts = [ split-path-VMS $(native) ] ;
+ local device = $(parts[1]) ;
+ local dir = $(parts[2]) ;
+ local file = $(parts[3]) ;
+ local elems ;
+
+ if $(device)
+ {
+ #
+ # rooted
+ #
+ elems = /$(device) ;
+ }
+
+ if $(dir) = "[]"
+ {
+ #
+ # Special case: current directory
+ #
+ elems = $(elems) "." ;
+ }
+ else if $(dir)
+ {
+ dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
+ local dir_parts = [ regex.split $(dir) \\. ] ;
+
+ if $(dir_parts[1]) = ""
+ {
+ #
+ # Relative path
+ #
+ dir_parts = $(dir_parts[2--1]) ;
+ }
+
+ #
+ # replace "parent-directory" parts (- => ..)
+ #
+ dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
+
+ elems = $(elems) $(dir_parts) ;
+ }
+
+ if $(file)
+ {
+ if ! [ MATCH (\\.) : $(file) ]
+ {
+ #
+ # Always add "." to end of non-extension file.
+ #
+ file = $(file). ;
+ }
+ elems = $(elems) $(file) ;
+ }
+
+ local portable = [ path.join $(elems) ] ;
+
+ return $(portable) ;
+}
+
+
+# Converts a portable path spec into a native VMS path.
+#
+# Relies on having at least one dot (".") included in the file name to be able
+# to differentiate it from the directory part.
+#
+rule native-VMS ( path )
+{
+ local device = "" ;
+ local dir = $(path) ;
+ local file = "" ;
+ local native ;
+ local split ;
+
+ #
+ # Has device ?
+ #
+ if [ is-rooted $(dir) ]
+ {
+ split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
+ device = $(split[1]) ;
+ dir = $(split[2]) ;
+ }
+
+ #
+ # Has file ?
+ #
+ # This is no exact science, just guess work:
+ #
+ # If the last part of the current path spec
+ # includes some chars, followed by a dot,
+ # optionally followed by more chars -
+ # then it is a file (keep your fingers crossed).
+ #
+ split = [ regex.split $(dir) / ] ;
+ local maybe_file = $(split[-1]) ;
+
+ if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
+ {
+ file = $(maybe_file) ;
+ dir = [ sequence.join $(split[1--2]) : / ] ;
+ }
+
+ #
+ # Has dir spec ?
+ #
+ if $(dir) = "."
+ {
+ dir = "[]" ;
+ }
+ else if $(dir)
+ {
+ dir = [ regex.replace $(dir) \\.\\. - ] ;
+ dir = [ regex.replace $(dir) / . ] ;
+
+ if $(device) = ""
+ {
+ #
+ # Relative directory
+ #
+ dir = "."$(dir) ;
+ }
+ dir = "["$(dir)"]" ;
+ }
+
+ native = [ sequence.join $(device) $(dir) $(file) ] ;
+
+ return $(native) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import errors : try catch ;
+
+ assert.true is-rooted "/" ;
+ assert.true is-rooted "/foo" ;
+ assert.true is-rooted "/foo/bar" ;
+ assert.result : is-rooted "." ;
+ assert.result : is-rooted "foo" ;
+ assert.result : is-rooted "foo/bar" ;
+
+ assert.true has-parent "foo" ;
+ assert.true has-parent "foo/bar" ;
+ assert.true has-parent "." ;
+ assert.result : has-parent "/" ;
+
+ assert.result "." : basename "." ;
+ assert.result ".." : basename ".." ;
+ assert.result "foo" : basename "foo" ;
+ assert.result "foo" : basename "bar/foo" ;
+ assert.result "foo" : basename "gaz/bar/foo" ;
+ assert.result "foo" : basename "/gaz/bar/foo" ;
+
+ assert.result "." : parent "foo" ;
+ assert.result "/" : parent "/foo" ;
+ assert.result "foo/bar" : parent "foo/bar/giz" ;
+ assert.result ".." : parent "." ;
+ assert.result ".." : parent "../foo" ;
+ assert.result "../../foo" : parent "../../foo/bar" ;
+
+ assert.result "." : reverse "." ;
+ assert.result ".." : reverse "foo" ;
+ assert.result "../../.." : reverse "foo/bar/giz" ;
+
+ assert.result "foo" : join "foo" ;
+ assert.result "/foo" : join "/" "foo" ;
+ assert.result "foo/bar" : join "foo" "bar" ;
+ assert.result "foo/bar" : join "foo/giz" "../bar" ;
+ assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
+ assert.result ".." : join "." ".." ;
+ assert.result ".." : join "foo" "../.." ;
+ assert.result "../.." : join "../foo" "../.." ;
+ assert.result "/foo" : join "/bar" "../foo" ;
+ assert.result "foo/giz" : join "foo/giz" "." ;
+ assert.result "." : join lib2 ".." ;
+ assert.result "/" : join "/a" ".." ;
+
+ assert.result /a/b : join /a/b/c .. ;
+
+ assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
+ assert.result "giz" : join "foo" ".." "giz" ;
+ assert.result "foo/giz" : join "foo" "." "giz" ;
+
+ try ;
+ {
+ join "a" "/b" ;
+ }
+ catch only first element may be rooted ;
+
+ local CWD = "/home/ghost/build" ;
+ assert.result : all-parents . : . : $(CWD) ;
+ assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
+ assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
+ assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
+
+ local CWD = "/home/ghost" ;
+ assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
+ assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
+
+ assert.result "c/d" : relative "a/b/c/d" "a/b" ;
+ assert.result "foo" : relative "foo" "." ;
+
+ local save-os = [ modules.peek path : os ] ;
+ modules.poke path : os : NT ;
+
+ assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+ assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
+ assert.result "foo" : make "foo/" ;
+ assert.result "foo" : make "foo\\" ;
+ assert.result "foo" : make "foo/." ;
+ assert.result "foo" : make "foo/bar/.." ;
+ assert.result "foo" : make "foo/bar/../" ;
+ assert.result "foo" : make "foo/bar/..\\" ;
+ assert.result "foo/bar" : make "foo/././././bar" ;
+ assert.result "/foo" : make "\\foo" ;
+ assert.result "/D:/My Documents" : make "D:\\My Documents" ;
+ assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
+
+ # Test processing 'invalid' paths containing multiple successive path
+ # separators.
+ assert.result "foo" : make "foo//" ;
+ assert.result "foo" : make "foo///" ;
+ assert.result "foo" : make "foo\\\\" ;
+ assert.result "foo" : make "foo\\\\\\" ;
+ assert.result "/foo" : make "//foo" ;
+ assert.result "/foo" : make "///foo" ;
+ assert.result "/foo" : make "\\\\foo" ;
+ assert.result "/foo" : make "\\\\\\foo" ;
+ assert.result "/foo" : make "\\/\\/foo" ;
+ assert.result "foo/bar" : make "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ;
+ assert.result "foo" : make "foo/bar//.." ;
+ assert.result "foo/bar" : make "foo/bar/giz//.." ;
+ assert.result "foo/giz" : make "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ;
+ assert.result "../../../foo" : make "..///.//..///.//..////foo///" ;
+
+ # Test processing 'invalid' rooted paths with too many '..' path elements
+ # that would place them before the root.
+ assert.result : make "/.." ;
+ assert.result : make "/../" ;
+ assert.result : make "/../." ;
+ assert.result : make "/.././" ;
+ assert.result : make "/foo/../bar/giz/.././././../../." ;
+ assert.result : make "/foo/../bar/giz/.././././../.././" ;
+ assert.result : make "//foo/../bar/giz/.././././../../." ;
+ assert.result : make "//foo/../bar/giz/.././././../.././" ;
+ assert.result : make "\\\\foo/../bar/giz/.././././../../." ;
+ assert.result : make "\\\\foo/../bar/giz/.././././../.././" ;
+ assert.result : make "/..///.//..///.//..////foo///" ;
+
+ assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
+ assert.result "foo" : native "foo" ;
+ assert.result "\\foo" : native "/foo" ;
+ assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
+
+ modules.poke path : os : UNIX ;
+
+ assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+ assert.result "/sub1" : make "/sub1/." ;
+ assert.result "/sub1" : make "/sub1/sub2/.." ;
+ assert.result "sub1" : make "sub1/." ;
+ assert.result "sub1" : make "sub1/sub2/.." ;
+ assert.result "/foo/bar" : native "/foo/bar" ;
+
+ modules.poke path : os : VMS ;
+
+ #
+ # Don't really need to poke os before these
+ #
+ assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
+ assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
+ assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+ assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+ assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
+ assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
+ assert.result "" "" "file" : split-path-VMS "file" ;
+ assert.result "" "" "" : split-path-VMS "" ;
+
+ #
+ # Special case: current directory
+ #
+ assert.result "" "[]" "" : split-path-VMS "[]" ;
+ assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
+ assert.result "" "[]" "file" : split-path-VMS "[]file" ;
+ assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
+
+ #
+ # Make portable paths
+ #
+ assert.result "/disk:" : make "disk:" ;
+ assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
+ assert.result "foo" : make "[.foo]" ;
+ assert.result "foo" : make "[.foo.bar.-]" ;
+ assert.result ".." : make "[.-]" ;
+ assert.result ".." : make "[-]" ;
+ assert.result "." : make "[]" ;
+ assert.result "giz.h" : make "giz.h" ;
+ assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
+ assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
+ assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
+
+ #
+ # Special case (adds '.' to end of file w/o extension to
+ # disambiguate from directory in portable path spec).
+ #
+ assert.result "Jamfile." : make "Jamfile" ;
+ assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
+ assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
+
+ #
+ # Make native paths
+ #
+ assert.result "disk:" : native "/disk:" ;
+ assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
+ assert.result "[.foo]" : native "foo" ;
+ assert.result "[.-]" : native ".." ;
+ assert.result "[.foo.-]" : native "foo/.." ;
+ assert.result "[]" : native "." ;
+ assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
+ assert.result "giz.h" : native "giz.h" ;
+ assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
+ assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
+
+ modules.poke path : os : $(save-os) ;
+}
diff --git a/jam-files/boost-build/util/print.jam b/jam-files/boost-build/util/print.jam
new file mode 100644
index 000000000..708d21aba
--- /dev/null
+++ b/jam-files/boost-build/util/print.jam
@@ -0,0 +1,488 @@
+# Copyright 2003 Douglas Gregor
+# Copyright 2002, 2003, 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Utilities for generating format-independent output. Using these helps
+# generate documentation in, at a minimum, plain/console and HTML formats.
+
+import modules ;
+import numbers ;
+import string ;
+import regex ;
+import "class" ;
+import scanner ;
+import path ;
+
+# The current output target. Defaults to console.
+output-target = console ;
+
+# The current output type. Defaults to plain. Other possible values are "html".
+output-type = plain ;
+
+# Whitespace.
+.whitespace = [ string.whitespace ] ;
+
+
+# Set the target and type of output to generate. This sets both the destination
+# output and the type of docs to generate to that output. The target can be
+# either a file or "console" for echoing to the console. If the type of output
+# is not specified it defaults to plain text.
+#
+rule output (
+ target # The target file or device; file or "console".
+ type ? # The type of output; "plain" or "html".
+)
+{
+ type ?= plain ;
+ if $(output-target) != $(target)
+ {
+ output-target = $(target) ;
+ output-type = $(type) ;
+ if $(output-type) = html
+ {
+ text
+ "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">"
+ "<html>"
+ "<head>"
+ "</head>"
+ "<body link=\"#0000ff\" vlink=\"#800080\">"
+ : true
+ : prefix ;
+ text
+ "</body>"
+ "</html>"
+ :
+ : suffix ;
+ }
+ }
+}
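+
+# For example (illustrative target name), generating a small HTML document:
+#   import print ;
+#   print.output doc/usage.html html ;
+#   print.section "Usage" "Run bjam --help for the full list of options." ;
+#   print.list-start ;
+#   print.list-item "--clean : remove targets instead of building them" ;
+#   print.list-end ;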
+
+
+# Generate a section with a description. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule section (
+ name # The name of the section.
+ description * # A number of description lines.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words $(name): ] ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ name = [ escape-html $(name) ] ;
+ text <h3>$(name)</h3> <p> ;
+ }
+ local pre = ;
+ while $(description)
+ {
+ local paragraph = ;
+ while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; }
+ if $(pre)
+ {
+ while $(description) && (
+ $(pre) = " $(description[1])" ||
+ ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] )
+ )
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; }
+ pre = ;
+ if $(output-type) = plain
+ {
+ lines $(paragraph) "" : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ while $(description) && ! [ string.is-whitespace $(description[1]) ]
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ if $(paragraph[1]) = :: && ! $(paragraph[2])
+ {
+ pre = " " ;
+ }
+ if $(paragraph[1]) = ::
+ {
+ if $(output-type) = plain
+ {
+ lines $(paragraph[2-]) "" : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph[2-]) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ;
+ local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ;
+ p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ;
+ if $(p[3]) = ::
+ {
+ pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ;
+ if ! $(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) $(p[1]): ; }
+ else { paragraph = $(paragraph[1--2]) $(p[1]) ; }
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ else
+ {
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ }
+ }
+ }
+ if $(output-type) = html
+ {
+ text </p> ;
+ }
+}
+
+
+# Generate the start of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-start ( )
+{
+ if $(output-type) = plain
+ {
+ }
+ else if $(output-type) = html
+ {
+ text <ul> ;
+ }
+}
+
+
+# Generate an item in a list. The type of output can be controlled by the value
+# of the 'output-type' variable.
+#
+rule list-item (
+ item + # The item to list.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words "*" $(item) ] : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <li> [ escape-html $(item) ] </li> ;
+ }
+}
+
+
+# Generate the end of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-end ( )
+{
+ if $(output-type) = plain
+ {
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </ul> ;
+ }
+}
+
+
+# Split the given text into separate lines, word-wrapping to a margin. The
+# default margin is 78 characters.
+#
+rule split-at-words (
+ text + # The text to split.
+ : margin ? # An optional margin, default is 78.
+)
+{
+ local lines = ;
+ text = [ string.words $(text:J=" ") ] ;
+ text = $(text:J=" ") ;
+ margin ?= 78 ;
+ local char-match-1 = ".?" ;
+ local char-match = "" ;
+ while $(margin) != 0
+ {
+ char-match = $(char-match)$(char-match-1) ;
+ margin = [ numbers.decrement $(margin) ] ;
+ }
+ while $(text)
+ {
+ local s = "" ;
+ local t = "" ;
+ # divide s into the first X characters and the rest
+ s = [ MATCH "^($(char-match))(.*)" : $(text) ] ;
+
+ if $(s[2])
+ {
+ # split the first half at a space
+ t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ;
+ }
+ else
+ {
+ t = $(s) ;
+ }
+
+ if ! $(t[2])
+ {
+ t += "" ;
+ }
+
+ text = $(t[2])$(s[2]) ;
+ lines += $(t[1]) ;
+ }
+ return $(lines) ;
+}
+
+
+# Generate a set of fixed lines. Each single item passed in is output on a
+# separate line. For console output this just echoes each line, but for HTML
+# it will split them with <br>.
+#
+rule lines (
+ text * # The lines of text.
+ : indent ? # Optional indentation prepended to each line after the first one.
+ outdent ? # Optional indentation to prepend to the first line.
+)
+{
+ text ?= "" ;
+ indent ?= "" ;
+ outdent ?= "" ;
+ if $(output-type) = plain
+ {
+ text $(outdent)$(text[1]) $(indent)$(text[2-]) ;
+ }
+ else if $(output-type) = html
+ {
+ local indent-chars = [ string.chars $(indent) ] ;
+ indent = "" ;
+ for local c in $(indent-chars)
+ {
+ if $(c) = " " { c = "&nbsp;" ; }
+ else if $(c) = " " { c = "&nbsp;&nbsp;&nbsp;&nbsp;" ; }
+ indent = $(indent)$(c) ;
+ }
+ local html-text = [ escape-html $(text) : "&nbsp;" ] ;
+ text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ;
+ }
+}
+
+
+# Output text directly to the current target. When outputting to a file, one
+# can indicate whether the text should form the "prefix", the "body" (default),
+# or the "suffix" of the file. This is independent of the actual execution
+# order of the text rule. This rule invokes a single action, only once, which
+# does the build of the file. Therefore actions on the target outside of this
+# rule will happen entirely before and/or after all output using this rule.
+#
+rule text (
+ strings * # The strings of text to output.
+ : overwrite ? # true to overwrite the output (if it is a file)
+ : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for a file).
+)
+{
+ prefix-body-suffix ?= body ;
+ if $(output-target) = console
+ {
+ if ! $(strings)
+ {
+ ECHO ;
+ }
+ else
+ {
+ for local s in $(strings)
+ {
+ ECHO $(s) ;
+ }
+ }
+ }
+ if ! $($(output-target).did-action)
+ {
+ $(output-target).did-action = yes ;
+ $(output-target).text-prefix = ;
+ $(output-target).text-body = ;
+ $(output-target).text-suffix = ;
+
+ nl on $(output-target) = "
+" ;
+ text-redirect on $(output-target) = ">>" ;
+ if $(overwrite)
+ {
+ text-redirect on $(output-target) = ">" ;
+ }
+ text-content on $(output-target) = ;
+
+ text-action $(output-target) ;
+
+ if $(overwrite) && $(output-target) != console
+ {
+ check-for-update $(output-target) ;
+ }
+ }
+ $(output-target).text-$(prefix-body-suffix) += $(strings) ;
+ text-content on $(output-target) =
+ $($(output-target).text-prefix)
+ $($(output-target).text-body)
+ $($(output-target).text-suffix) ;
+}
+
+
+# Outputs the text to the current targets, after word-wrapping it.
+#
+rule wrapped-text ( text + )
+{
+ local lines = [ split-at-words $(text) ] ;
+ text $(lines) ;
+}
+
+
+# Escapes text into HTML/XML printable equivalents. It does not know about
+# tags, so any tags fed into it will also be escaped. Currently escapes space,
+# "<", ">", and "&".
+#
+rule escape-html (
+ text + # The text to escape.
+ : space ? # What to replace spaces with, defaults to " ".
+)
+{
+ local html-text = ;
+ while $(text)
+ {
+ local html = $(text[1]) ;
+ text = $(text[2-]) ;
+ html = [ regex.replace $(html) "&" "&amp;" ] ;
+ html = [ regex.replace $(html) "<" "&lt;" ] ;
+ html = [ regex.replace $(html) ">" "&gt;" ] ;
+ if $(space)
+ {
+ html = [ regex.replace $(html) " " "$(space)" ] ;
+ }
+ html-text += $(html) ;
+ }
+ return $(html-text) ;
+}
+
+
+# Outputs the text strings collected by the text rule to the output file.
+#
+actions quietly text-action
+{
+ @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)"
+}
+
+
+rule get-scanner ( )
+{
+ if ! $(.scanner)
+ {
+ .scanner = [ class.new print-scanner ] ;
+ }
+ return $(.scanner) ;
+}
+
+
+# The following code to update print targets when their contents
+# change is a horrible hack. It basically creates a target which
+# binds to this file (print.jam) and installs a scanner on it
+# which reads the target and compares its contents to the new
+# contents that we're writing.
+#
+rule check-for-update ( target )
+{
+ local scanner = [ get-scanner ] ;
+ local file = [ path.native [ modules.binding $(__name__) ] ] ;
+ local g = [ MATCH <(.*)> : $(target:G) ] ;
+ local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ;
+ DEPENDS $(target) : $(dependency-target) ;
+ SEARCH on $(dependency-target) = $(file:D) ;
+ ISFILE $(dependency-target) ;
+ NOUPDATE $(dependency-target) ;
+ base on $(dependency-target) = $(target) ;
+ scanner.install $(scanner) : $(dependency-target) none ;
+ return $(dependency-target) ;
+}
+
+
+class print-scanner : scanner
+{
+ import path ;
+ import os ;
+
+ rule pattern ( )
+ {
+ return "(One match...)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local base = [ on $(target) return $(base) ] ;
+ local nl = [ on $(base) return $(nl) ] ;
+ local text-content = [ on $(base) return $(text-content) ] ;
+ local dir = [ on $(base) return $(LOCATE) ] ;
+ if $(dir)
+ {
+ dir = [ path.make $(dir) ] ;
+ }
+ local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ;
+ local actual-content ;
+ if [ os.name ] = NT
+ {
+ actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ;
+ }
+ else
+ {
+ actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ;
+ }
+ if $(text-content:J=$(nl)) != $(actual-content)
+ {
+ ALWAYS $(base) ;
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result one two three : split-at-words one two three : 5 ;
+ assert.result "one two" three : split-at-words one two three : 8 ;
+ assert.result "one two" three : split-at-words one two three : 9 ;
+ assert.result "one two three" : split-at-words one two three ;
+
+ # VP, 2004-12-03 The following test fails for some reason, so commenting it
+ # out.
+ #assert.result "one&nbsp;two&nbsp;three" "&amp;&lt;&gt;" :
+ # escape-html "one two three" "&<>" ;
+}
diff --git a/jam-files/boost-build/util/regex.jam b/jam-files/boost-build/util/regex.jam
new file mode 100644
index 000000000..234c36f62
--- /dev/null
+++ b/jam-files/boost-build/util/regex.jam
@@ -0,0 +1,193 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Returns a list of the following substrings:
+# 1) from beginning till the first occurrence of 'separator' or till the end,
+# 2) between each occurrence of 'separator' and the next occurrence,
+# 3) from the last occurrence of 'separator' till the end.
+# If no separator is present, the result will contain only one element.
+#
+
+rule split ( string separator )
+{
+ local result ;
+ local s = $(string) ;
+
+ # Break pieces off 's' until it has no separators left.
+ local match = 1 ;
+ while $(match)
+ {
+ match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
+ if $(match)
+ {
+ match += "" ; # in case 3rd item was empty - works around MATCH bug
+ result = $(match[3]) $(result) ;
+ s = $(match[1]) ;
+ }
+ }
+ # Combine the remaining part at the beginning, which does not have
+ # separators, with the pieces broken off. Note that the rule's signature
+ # does not allow the initial s to be empty.
+ return $(s) $(result) ;
+}
+
+
+# Returns the concatenated results of applying regex.split to every element of
+# the list using the separator pattern.
+#
+rule split-list ( list * : separator )
+{
+ local result ;
+ for s in $(list)
+ {
+ result += [ split $(s) $(separator) ] ;
+ }
+ return $(result) ;
+}
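+
+# For example:
+#   [ regex.split-list "a/b" "c/d" : / ]   # returns a b c d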
+
+
+# Match string against pattern, and return the elements indicated by indices.
+#
+rule match ( pattern : string : indices * )
+{
+ indices ?= 1 2 3 4 5 6 7 8 9 ;
+ local x = [ MATCH $(pattern) : $(string) ] ;
+ return $(x[$(indices)]) ;
+}
+
+
+# Matches all elements of 'list' against the 'pattern' and returns a list of
+# elements indicated by indices of all successful matches. If 'indices' is
+# omitted, returns a list of the first parenthesised groups of all successful
+# matches.
+#
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE regex : transform ;
+
+
+# Escapes all of the characters in symbols using the escape symbol escape-symbol
+# for the given string, and returns the escaped string.
+#
+rule escape ( string : symbols : escape-symbol )
+{
+ local result = "" ;
+ local m = 1 ;
+ while $(m)
+ {
+ m = [ MATCH ^([^$(symbols)]*)([$(symbols)])(.*) : $(string) ] ;
+ if $(m)
+ {
+ m += "" ; # Supposedly a bug fix; borrowed from regex.split
+ result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ;
+ string = $(m[3]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(result)$(string)" ;
+ return $(result) ;
+}
+
+
+# Replaces occurrences of a match string in a given string and returns the new
+# string. The match string can be a regular expression.
+#
+rule replace (
+ string # The string to modify.
+ match # The characters to replace.
+ replacement # The string to replace with.
+ )
+{
+ local result = "" ;
+ local parts = 1 ;
+ while $(parts)
+ {
+ parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
+ if $(parts)
+ {
+ parts += "" ;
+ result = "$(replacement)$(parts[3])$(result)" ;
+ string = $(parts[1]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(string)$(result)" ;
+ return $(result) ;
+}
+
+
+# Replaces occurrences of a match string in a given list of strings and returns
+# a list of new strings. The match string can be a regular expression.
+#
+# list - the list of strings to modify.
+# match - the search expression.
+# replacement - the string to replace with.
+#
+rule replace-list ( list * : match : replacement )
+{
+ local result ;
+ for local e in $(list)
+ {
+ result += [ replace $(e) $(match) $(replacement) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result a b c : split "a/b/c" / ;
+ assert.result "" a b c : split "/a/b/c" / ;
+ assert.result "" "" a b c : split "//a/b/c" / ;
+ assert.result "" a "" b c : split "/a//b/c" / ;
+ assert.result "" a "" b c "" : split "/a//b/c/" / ;
+ assert.result "" a "" b c "" "" : split "/a//b/c//" / ;
+
+ assert.result a c b d
+ : match (.)(.)(.)(.) : abcd : 1 3 2 4 ;
+
+ assert.result a b c d
+ : match (.)(.)(.)(.) : abcd ;
+
+ assert.result ababab cddc
+ : match ((ab)*)([cd]+) : abababcddc : 1 3 ;
+
+ assert.result a.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <(.*)> ;
+
+ assert.result a.h b.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <([^>]*)>|\"([^\"]*)\" : 1 2 ;
+
+ assert.result "^<?xml version=\"1.0\"^>"
+ : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ;
+
+ assert.result "<?xml version=\\\"1.0\\\">"
+ : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ;
+
+ assert.result "string&nbsp;string&nbsp;" : replace "string string " " " "&nbsp;" ;
+ assert.result "&nbsp;string&nbsp;string" : replace " string string" " " "&nbsp;" ;
+ assert.result "string&nbsp;&nbsp;string" : replace "string string" " " "&nbsp;" ;
+ assert.result "-" : replace "&" "&" "-" ;
+
+ assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ;
+}
diff --git a/jam-files/boost-build/util/sequence.jam b/jam-files/boost-build/util/sequence.jam
new file mode 100644
index 000000000..73919a65d
--- /dev/null
+++ b/jam-files/boost-build/util/sequence.jam
@@ -0,0 +1,335 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import assert ;
+import numbers ;
+import modules ;
+
+
+# Note that algorithms in this module execute largely in the caller's module
+# namespace, so that local rules can be used as function objects. Also note that
+# most predicates can be multi-element lists. In that case, all but the first
+# element are prepended to the first argument which is passed to the rule named
+# by the first element.
+
+
+# Return the elements e of $(sequence) for which [ $(predicate) e ] has a
+# non-null value.
+#
+rule filter ( predicate + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ if [ modules.call-in $(caller) : $(predicate) $(e) ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
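+
+# For example, extra predicate elements are bound as leading arguments of the
+# named rule (illustrative local rule):
+#   local rule has-suffix ( suffix name )
+#   {
+#       if $(name:S) = $(suffix) { return true ; }
+#   }
+#   [ sequence.filter has-suffix .cpp : a.cpp b.h c.cpp ]   # returns a.cpp c.cpp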
+
+
+# Return a new sequence consisting of [ $(function) $(e) ] for each element e of
+# $(sequence).
+#
+rule transform ( function + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ result += [ modules.call-in $(caller) : $(function) $(e) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule reverse ( s * )
+{
+ local r ;
+ for local x in $(s)
+ {
+ r = $(x) $(r) ;
+ }
+ return $(r) ;
+}
+
+
+rule less ( a b )
+{
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+}
+
+
+# Insertion-sort s using the BinaryPredicate ordered.
+#
+rule insertion-sort ( s * : ordered * )
+{
+ if ! $(ordered)
+ {
+ return [ SORT $(s) ] ;
+ }
+ else
+ {
+ local caller = [ CALLER_MODULE ] ;
+ ordered ?= sequence.less ;
+ local result = $(s[1]) ;
+ if $(ordered) = sequence.less
+ {
+ local head tail ;
+ for local x in $(s[2-])
+ {
+ head = ;
+ tail = $(result) ;
+ while $(tail) && ( $(tail[1]) < $(x) )
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+ else
+ {
+ for local x in $(s[2-])
+ {
+ local head tail ;
+ tail = $(result) ;
+ while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ]
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+
+ return $(result) ;
+ }
+}
+
+
+# Merge two ordered sequences using the BinaryPredicate ordered.
+#
+rule merge ( s1 * : s2 * : ordered * )
+{
+ ordered ?= sequence.less ;
+ local result__ ;
+ local caller = [ CALLER_MODULE ] ;
+
+ while $(s1) && $(s2)
+ {
+ if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ]
+ {
+ result__ += $(s1[1]) ;
+ s1 = $(s1[2-]) ;
+ }
+ else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ]
+ {
+ result__ += $(s2[1]) ;
+ s2 = $(s2[2-]) ;
+ }
+ else
+ {
+ s2 = $(s2[2-]) ;
+ }
+
+ }
+ result__ += $(s1) ;
+ result__ += $(s2) ;
+
+ return $(result__) ;
+}
+
+
+# Join the elements of s into one long string. If joint is supplied, it is used
+# as a separator.
+#
+rule join ( s * : joint ? )
+{
+ joint ?= "" ;
+ return $(s:J=$(joint)) ;
+}
+
+
+# Find the length of any sequence.
+#
+rule length ( s * )
+{
+ local result = 0 ;
+ for local i in $(s)
+ {
+ result = [ CALC $(result) + 1 ] ;
+ }
+ return $(result) ;
+}
+
+
+rule unique ( list * : stable ? )
+{
+ local result ;
+ local prev ;
+ if $(stable)
+ {
+ for local f in $(list)
+ {
+ if ! $(f) in $(result)
+ {
+ result += $(f) ;
+ }
+ }
+ }
+ else
+ {
+ for local i in [ SORT $(list) ]
+ {
+ if $(i) != $(prev)
+ {
+ result += $(i) ;
+ }
+ prev = $(i) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns the maximum number in 'elements'. Uses 'ordered' for comparisons or
+# 'numbers.less' if none is provided.
+#
+rule max-element ( elements + : ordered ? )
+{
+ ordered ?= numbers.less ;
+
+ local max = $(elements[1]) ;
+ for local e in $(elements[2-])
+ {
+ if [ $(ordered) $(max) $(e) ]
+ {
+ max = $(e) ;
+ }
+ }
+ return $(max) ;
+}
+
+
+# Returns all of 'elements' for which the corresponding element in the
+# parallel list 'ranks' is equal to the maximum value in 'ranks'.
+#
+rule select-highest-ranked ( elements * : ranks * )
+{
+ if $(elements)
+ {
+ local max-rank = [ max-element $(ranks) ] ;
+ local result ;
+ while $(elements)
+ {
+ if $(ranks[1]) = $(max-rank)
+ {
+ result += $(elements[1]) ;
+ }
+ elements = $(elements[2-]) ;
+ ranks = $(ranks[2-]) ;
+ }
+ return $(result) ;
+ }
+}
+NATIVE_RULE sequence : select-highest-ranked ;
+
+
+rule __test__ ( )
+{
+ # Use a unique module so we can test the use of local rules.
+ module sequence.__test__
+ {
+ import assert ;
+ import sequence ;
+
+ local rule is-even ( n )
+ {
+ if $(n) in 0 2 4 6 8
+ {
+ return true ;
+ }
+ }
+
+ assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ;
+
+ # Test that argument binding works.
+ local rule is-equal-test ( x y )
+ {
+ if $(x) = $(y)
+ {
+ return true ;
+ }
+ }
+
+ assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ;
+
+ local rule append-x ( n )
+ {
+ return $(n)x ;
+ }
+
+ assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ;
+
+ local rule repeat2 ( x )
+ {
+ return $(x) $(x) ;
+ }
+
+ assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ;
+
+ local rule test-greater ( a b )
+ {
+ if $(a) > $(b)
+ {
+ return true ;
+ }
+ }
+ assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ;
+ assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ;
+ assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ;
+ assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ;
+ assert.result 1 2 3 : sequence.merge 1 2 3 : ;
+ assert.result 1 : sequence.merge 1 : 1 ;
+
+ assert.result foo-bar-baz : sequence.join foo bar baz : - ;
+ assert.result substandard : sequence.join sub stan dard ;
+ assert.result 3.0.1 : sequence.join 3.0.1 : - ;
+
+ assert.result 0 : sequence.length ;
+ assert.result 3 : sequence.length a b c ;
+ assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ;
+
+ assert.result 1 : sequence.length a ;
+ assert.result 10 : sequence.length a b c d e f g h i j ;
+ assert.result 11 : sequence.length a b c d e f g h i j k ;
+ assert.result 12 : sequence.length a b c d e f g h i j k l ;
+
+ local p2 = x ;
+ for local i in 1 2 3 4 5 6 7 8
+ {
+ p2 = $(p2) $(p2) ;
+ }
+ assert.result 256 : sequence.length $(p2) ;
+
+ assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ;
+
+ assert.result 5 : sequence.max-element 1 3 5 0 4 ;
+
+ assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ;
+
+ assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ;
+ }
+}
diff --git a/jam-files/boost-build/util/set.jam b/jam-files/boost-build/util/set.jam
new file mode 100644
index 000000000..fc179134f
--- /dev/null
+++ b/jam-files/boost-build/util/set.jam
@@ -0,0 +1,93 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+class set
+{
+ rule __init__ ( )
+ {
+ }
+
+ rule add ( elements * )
+ {
+ for local e in $(elements)
+ {
+ if ! $($(e))
+ {
+ $(e) = 1 ;
+ self.result += $(e) ;
+ }
+ }
+ }
+
+ rule contains ( element )
+ {
+ return $($(element)) ;
+ }
+
+ rule list ( )
+ {
+ return $(self.result) ;
+ }
+}
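+
+# For example (illustrative):
+#   import "class" : new ;
+#   local s = [ new set ] ;
+#   $(s).add a b c ;
+#   $(s).add b ;
+#   ECHO [ $(s).list ] ;       # prints a b c
+#   ECHO [ $(s).contains b ] ; # prints 1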
+
+
+
+# Returns the elements of set1 that are not in set2.
+#
+rule difference ( set1 * : set2 * )
+{
+ local result = ;
+ for local element in $(set1)
+ {
+ if ! ( $(element) in $(set2) )
+ {
+ result += $(element) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE set : difference ;
+
+
+# Returns all the items appearing in both set1 and set2, i.e. their
+# intersection.
+#
+rule intersection ( set1 * : set2 * )
+{
+ local result ;
+ for local v in $(set1)
+ {
+ if $(v) in $(set2)
+ {
+ result += $(v) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns whether set1 & set2 contain the same elements. Note that this ignores
+# any element ordering differences as well as any element duplication.
+#
+rule equal ( set1 * : set2 * )
+{
+ if $(set1) in $(set2) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 0 1 4 6 8 9 : difference 0 1 2 3 4 5 6 7 8 9 : 2 3 5 7 ;
+ assert.result 2 5 7 : intersection 0 1 2 4 5 6 7 8 9 : 2 3 5 7 ;
+
+ assert.true equal : ;
+ assert.true equal 1 1 2 3 : 3 2 2 1 ;
+ assert.false equal 2 3 : 3 2 2 1 ;
+}
diff --git a/jam-files/boost-build/util/string.jam b/jam-files/boost-build/util/string.jam
new file mode 100644
index 000000000..a39ed119e
--- /dev/null
+++ b/jam-files/boost-build/util/string.jam
@@ -0,0 +1,189 @@
+# Copyright 2002 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import regex ;
+
+
+# Characters considered whitespace, as a list.
+.whitespace-chars = " " " " "
+" ;
+
+# Characters considered whitespace, as a single string.
+.whitespace = $(.whitespace-chars:J="") ;
+
+
+# Returns the canonical set of whitespace characters, as a list.
+#
+rule whitespace-chars ( )
+{
+ return $(.whitespace-chars) ;
+}
+
+
+# Returns the canonical set of whitespace characters, as a single string.
+#
+rule whitespace ( )
+{
+ return $(.whitespace) ;
+}
+
+
+# Splits the given string into a list of strings composed of each character of
+# the string in sequence.
+#
+rule chars (
+ string # The string to split.
+ )
+{
+ local result ;
+ while $(string)
+ {
+ local s = [ MATCH (.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.*) : $(string) ] ;
+ string = $(s[9]) ;
+ result += $(s[1-8]) ;
+ }
+
+ # Trim off empty strings.
+ while $(result[1]) && ! $(result[-1])
+ {
+ result = $(result[1--2]) ;
+ }
+
+ return $(result) ;
+}
+
+
+# Apply a set of standard transformations to string to produce an abbreviation
+# no more than 5 characters long.
+#
+rule abbreviate ( string )
+{
+ local r = $(.abbreviated-$(string)) ;
+ if $(r)
+ {
+ return $(r) ;
+ }
+ # Anything less than 4 characters gets no abbreviation.
+ else if ! [ MATCH (....) : $(string) ]
+ {
+ .abbreviated-$(string) = $(string) ;
+ return $(string) ;
+ }
+ else
+ {
+ # Separate the initial letter in case it's a vowel.
+ local s1 = [ MATCH ^(.)(.*) : $(string) ] ;
+
+ # Drop trailing "ing".
+ local s2 = [ MATCH ^(.*)ing$ : $(s1[2]) ] ;
+ s2 ?= $(s1[2]) ;
+
+ # Reduce all doubled characters to one.
+ local last = "" ;
+ for local c in [ chars $(s2) ]
+ {
+ if $(c) != $(last)
+ {
+ r += $(c) ;
+ last = $(c) ;
+ }
+ }
+ s2 = $(r:J="") ;
+
+ # Chop all vowels out of the remainder.
+ s2 = [ regex.replace $(s2) [AEIOUaeiou] "" ] ;
+
+ # Shorten remaining consonants to 4 characters.
+ s2 = [ MATCH ^(.?.?.?.?) : $(s2) ] ;
+
+ # Glue the initial character back on to the front.
+ s2 = $(s1[1])$(s2) ;
+
+ .abbreviated-$(string) = $(s2) ;
+ return $(s2) ;
+ }
+}
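+
+# For example, "debugging" is abbreviated as follows: the initial "d" is set
+# aside, the trailing "ing" is dropped ("ebugg"), the doubled "g" is collapsed
+# ("ebug"), the vowels are removed ("bg"), at most four characters are kept,
+# and the initial letter is glued back on, giving "dbg".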
+
+
+# Concatenates the given strings, inserting the given separator between each
+# string.
+#
+rule join (
+ strings * # The strings to join.
+ : separator ? # The optional separator.
+ )
+{
+ separator ?= "" ;
+ return $(strings:J=$(separator)) ;
+}
+
+
+# Split a string into whitespace separated words.
+#
+rule words (
+ string # The string to split.
+ : whitespace * # Optional, characters to consider as whitespace.
+ )
+{
+ whitespace = $(whitespace:J="") ;
+ whitespace ?= $(.whitespace) ;
+ local w = ;
+ while $(string)
+ {
+ string = [ MATCH "^[$(whitespace)]*([^$(whitespace)]*)(.*)" : $(string) ] ;
+ if $(string[1]) && $(string[1]) != ""
+ {
+ w += $(string[1]) ;
+ }
+ string = $(string[2]) ;
+ }
+ return $(w) ;
+}
+
+
+# Check that the given string is composed entirely of whitespace.
+#
+rule is-whitespace (
+ string ? # The string to test.
+ )
+{
+ if ! $(string) { return true ; }
+ else if $(string) = "" { return true ; }
+ else if [ MATCH "^([$(.whitespace)]+)$" : $(string) ] { return true ; }
+ else { return ; }
+}
+
+rule __test__ ( )
+{
+ import assert ;
+ assert.result a b c : chars abc ;
+
+ assert.result rntm : abbreviate runtime ;
+ assert.result ovrld : abbreviate overload ;
+ assert.result dbg : abbreviate debugging ;
+ assert.result async : abbreviate asynchronous ;
+ assert.result pop : abbreviate pop ;
+ assert.result aaa : abbreviate aaa ;
+ assert.result qck : abbreviate quack ;
+ assert.result sttc : abbreviate static ;
+
+ # Check boundary cases.
+ assert.result a : chars a ;
+ assert.result : chars "" ;
+ assert.result a b c d e f g h : chars abcdefgh ;
+ assert.result a b c d e f g h i : chars abcdefghi ;
+ assert.result a b c d e f g h i j : chars abcdefghij ;
+ assert.result a b c d e f g h i j k : chars abcdefghijk ;
+
+ assert.result a//b/c/d : join a "" b c d : / ;
+ assert.result abcd : join a "" b c d ;
+
+ assert.result a b c : words "a b c" ;
+
+ assert.true is-whitespace " " ;
+ assert.false is-whitespace " a b c " ;
+ assert.true is-whitespace "" ;
+ assert.true is-whitespace ;
+}
diff --git a/jam-files/boost-build/util/utility.jam b/jam-files/boost-build/util/utility.jam
new file mode 100644
index 000000000..c46747f58
--- /dev/null
+++ b/jam-files/boost-build/util/utility.jam
@@ -0,0 +1,235 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : is-instance ;
+import errors ;
+
+
+# For all elements of 'list' which do not already have 'suffix', add 'suffix'.
+#
+rule apply-default-suffix ( suffix : list * )
+{
+ local result ;
+ for local i in $(list)
+ {
+ if $(i:S) = $(suffix)
+ {
+ result += $(i) ;
+ }
+ else
+ {
+ result += $(i)$(suffix) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'name' contains a dot, returns the part before the last dot. If 'name'
+# contains no dot, returns it unmodified.
+#
+rule basename ( name )
+{
+ if $(name:S)
+ {
+ name = $(name:B) ;
+ }
+ return $(name) ;
+}
+
+
+# Return the file of the caller of the rule that called caller-file.
+#
+rule caller-file ( )
+{
+ local bt = [ BACKTRACE ] ;
+ return $(bt[9]) ;
+}
+
+
+# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal'
+# method. Uses ordinary Jam comparison otherwise.
+#
+rule equal ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).equal $(b) ] ;
+ }
+ else
+ {
+ if $(a) = $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less'
+# method. Uses ordinary Jam comparison otherwise.
+#
+rule less ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).less $(b) ] ;
+ }
+ else
+ {
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Returns the textual representation of the argument. If it is a class instance,
+# calls its 'str' method. Otherwise, returns the argument.
+#
+rule str ( value )
+{
+ if [ is-instance $(value) ]
+ {
+ return [ $(value).str ] ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+# Accepts a list of gristed values and returns them ungristed. Reports an error
+# in case any of the passed parameters is not gristed, i.e. surrounded in angle
+# brackets < and >.
+#
+rule ungrist ( names * )
+{
+ local result ;
+ for local name in $(names)
+ {
+ local stripped = [ MATCH ^<(.*)>$ : $(name) ] ;
+ if ! $(stripped)
+ {
+ errors.error "in ungrist $(names) : $(name) is not of the form <.*>" ;
+ }
+ result += $(stripped) ;
+ }
+ return $(result) ;
+}
+
+
+# If the passed value is quoted, unquotes it. Otherwise returns the value
+# unchanged.
+#
+rule unquote ( value ? )
+{
+ local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ;
+ if $(match-result)
+ {
+ return $(match-result[2]) ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+
+ assert.result 123 : str 123 ;
+
+ class test-class__
+ {
+ rule __init__ ( ) { }
+ rule str ( ) { return "str-test-class" ; }
+ rule less ( a ) { return "yes, of course!" ; }
+ rule equal ( a ) { return "not sure" ; }
+ }
+
+ assert.result "str-test-class" : str [ new test-class__ ] ;
+ assert.true less 1 2 ;
+ assert.false less 2 1 ;
+ assert.result "yes, of course!" : less [ new test-class__ ] 1 ;
+ assert.true equal 1 1 ;
+ assert.false equal 1 2 ;
+ assert.result "not sure" : equal [ new test-class__ ] 1 ;
+
+ assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib
+ ;
+
+ assert.result foo : basename foo ;
+ assert.result foo : basename foo.so ;
+ assert.result foo.so : basename foo.so.1 ;
+
+ assert.result : unquote ;
+ assert.result "" : unquote "" ;
+ assert.result foo : unquote foo ;
+ assert.result \"foo : unquote \"foo ;
+ assert.result foo\" : unquote foo\" ;
+ assert.result foo : unquote \"foo\" ;
+ assert.result \"foo\" : unquote \"\"foo\"\" ;
+
+ assert.result : ungrist ;
+ assert.result foo : ungrist <foo> ;
+ assert.result <foo> : ungrist <<foo>> ;
+ assert.result foo bar : ungrist <foo> <bar> ;
+
+ try ;
+ {
+ ungrist "" ;
+ }
+ catch "in ungrist : is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist <> ;
+ }
+ catch "in ungrist <> : <> is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo ;
+ }
+ catch "in ungrist foo : foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist <foo ;
+ }
+ catch "in ungrist <foo : <foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo> ;
+ }
+ catch "in ungrist foo> : foo> is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo bar ;
+ }
+ catch "in ungrist foo : foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo <bar> ;
+ }
+ catch "in ungrist foo : foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist <foo> bar ;
+ }
+ catch "in ungrist bar : bar is not of the form <.*>" ;
+}
diff --git a/jam-files/engine/Jambase b/jam-files/engine/Jambase
new file mode 100644
index 000000000..94f8fbde5
--- /dev/null
+++ b/jam-files/engine/Jambase
@@ -0,0 +1,2473 @@
+#
+# /+\
+# +\ Copyright 1993, 2000 Christopher Seiwald.
+# \+/
+#
+# This file is part of Jam - see jam.c for Copyright information.
+#
+
+# This file is ALSO:
+# Copyright 2001-2004 David Abrahams.
+# Copyright 2002-2004 Rene Rivera.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+if $(NT)
+{
+ SLASH ?= \\ ;
+}
+SLASH ?= / ;
+
+
+# Glob for patterns in the directories starting from the given start directory,
+# up to and including the root of the file-system. We stop globbing as soon as
+# we find at least one match.
+#
+rule find-to-root ( dir : patterns + )
+{
+ local globs = [ GLOB $(dir) : $(patterns) ] ;
+ while ! $(globs) && $(dir:P) != $(dir)
+ {
+ dir = $(dir:P) ;
+ globs = [ GLOB $(dir) : $(patterns) ] ;
+ }
+ return $(globs) ;
+}
+
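+# For illustration, a minimal sketch of how find-to-root is typically used
+# (this mirrors the search performed further below):
+#
+#   local found = [ find-to-root [ PWD ] : boost-build.jam ] ;
+#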
+
+# This global will hold the location of the user's boost-build.jam file.
+.boost-build-file = ;
+
+# This global will hold the location of the build system bootstrap file.
+.bootstrap-file = ;
+
+# Remember the value of $(BOOST_BUILD_PATH) supplied to us by the user.
+BOOST_BUILD_PATH.user-value = $(BOOST_BUILD_PATH) ;
+
+# On Unix only, when BOOST_BUILD_PATH is not supplied by the user, set it to a
+# sensible default value. This allows Boost.Build to work without any
+# environment variables, which is good in itself and also required by the Debian
+# Policy.
+if ! $(BOOST_BUILD_PATH) && $(UNIX)
+{
+ BOOST_BUILD_PATH = /usr/share/boost-build ;
+}
+
+
+rule _poke ( module-name ? : variables + : value * )
+{
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+}
+
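+# For illustration, _poke assigns a value to a variable inside the named
+# module; e.g. the invocation used further below to export a variable into
+# the .ENVIRON module:
+#
+#   _poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+#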
+
+# This rule can be invoked from the user's optional boost-build.jam file both to
+# indicate where to find the build system files and to load them. The path
+# indicated is relative to the location of the boost-build.jam file.
+#
+rule boost-build ( dir ? )
+{
+ if $(.bootstrap-file)
+ {
+ ECHO "Error: Illegal attempt to re-bootstrap the build system by invoking" ;
+ ECHO ;
+ ECHO " 'boost-build" $(dir) ";'" ;
+ ECHO ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+
+ # Add the given directory to the path so we can find the build system. If
+ # dir is empty, has no effect.
+ BOOST_BUILD_PATH = $(dir:R=$(.boost-build-file:D)) $(BOOST_BUILD_PATH) ;
+
+ # We might have just modified the *global* value of BOOST_BUILD_PATH. The
+ # code that loads the rest of Boost.Build, in particular the site-config.jam
+ # and user-config.jam configuration files uses os.environ, so we need to
+ # update the value there.
+ _poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+
+ # Try to find the build system bootstrap file 'bootstrap.jam'.
+ local bootstrap-file = [ GLOB $(BOOST_BUILD_PATH) : bootstrap.jam ] ;
+ .bootstrap-file = $(bootstrap-file[1]) ;
+
+ # If there is no bootstrap.jam we can find, exit with an error.
+ if ! $(.bootstrap-file)
+ {
+ ECHO "Unable to load Boost.Build: could not find build system." ;
+ ECHO --------------------------------------------------------- ;
+ ECHO "$(.boost-build-file) attempted to load the build system by invoking" ;
+ ECHO ;
+ ECHO " 'boost-build" $(dir) ";'" ;
+ ECHO ;
+ ECHO "but we were unable to find \"bootstrap.jam\" in the specified directory" ;
+ ECHO "or in BOOST_BUILD_PATH (searching "$(BOOST_BUILD_PATH:J=", ")")." ;
+ ECHO ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+
+ if [ MATCH .*(--debug-configuration).* : $(ARGV) ]
+ {
+ ECHO "notice: loading Boost.Build from"
+ [ NORMALIZE_PATH $(.bootstrap-file:D) ] ;
+ }
+
+ # Load the build system, now that we know where to start from.
+ include $(.bootstrap-file) ;
+}
+
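+# For illustration, a user's boost-build.jam usually consists of a single
+# invocation of the rule above; the relative directory shown here is a
+# hypothetical example:
+#
+#   boost-build jam-files/boost-build ;
+#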
+
+if [ MATCH .*(b2).* : $(ARGV[1]:BL) ]
+ || [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]
+ || $(BOOST_ROOT) # A temporary measure so Jam works with Boost.Build v1.
+{
+ # We attempt to load "boost-build.jam" by searching from the current
+ # invocation directory up to the root of the file-system.
+ #
+ # boost-build.jam is expected to invoke the "boost-build" rule to load the
+ # Boost.Build files.
+
+ local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;
+ local self = [ SELF_PATH ] ;
+ local boost-build-relative = ../../share/boost-build ;
+ local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;
+
+ local boost-build-files =
+ [ find-to-root [ PWD ] : boost-build.jam ]
+ [ GLOB $(self-based-path) : boost-build.jam ]
+ # Another temporary measure so Jam works with Boost.Build v1.
+ [ GLOB $(search-path) : boost-build.jam ] ;
+
+ .boost-build-file = $(boost-build-files[1]) ;
+
+ # If there is no boost-build.jam we can find, exit with an error and some
+ # diagnostic information.
+ if ! $(.boost-build-file)
+ {
+ ECHO "Unable to load Boost.Build: could not find \"boost-build.jam\"" ;
+ ECHO --------------------------------------------------------------- ;
+
+ if ! [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]
+ {
+ ECHO "BOOST_ROOT must be set, either in the environment, or " ;
+ ECHO "on the command-line with -sBOOST_ROOT=..., to the root" ;
+ ECHO "of the boost installation." ;
+ ECHO ;
+ }
+
+ ECHO "Attempted search from" [ PWD ] "up to the root" ;
+ ECHO "at" $(self-based-path) ;
+ ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+
+ if [ MATCH .*(--debug-configuration).* : $(ARGV) ]
+ {
+ ECHO "notice: found boost-build.jam at"
+ [ NORMALIZE_PATH $(.boost-build-file) ] ;
+ }
+
+ # Now load the boost-build.jam to get the build system loaded. This
+ # incidentally loads the user's Jamfile and attempts to build targets.
+ #
+ # We also set it up so we can tell whether we are loading the new V2 system
+ # or the old V1 system.
+ include $(.boost-build-file) ;
+
+ # Check that, at minimum, the bootstrap file was found.
+ if ! $(.bootstrap-file)
+ {
+ ECHO "Unable to load Boost.Build" ;
+ ECHO -------------------------- ;
+ ECHO "\"$(.boost-build-file)\" was found by searching from" [ PWD ] "up to the root" ;
+ ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
+ ECHO ;
+ ECHO "However, it failed to call the \"boost-build\" rule to indicate" ;
+ ECHO "the location of the build system." ;
+ ECHO ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+}
+else
+{
+
+#
+# JAMBASE - jam 2.3 ruleset providing make(1)-like functionality
+#
+# Supports UNIX, NT, and VMS.
+#
+# 12/27/93 (seiwald) - perturb library sources with SOURCE_GRIST
+# 04/18/94 (seiwald) - use '?=' when setting OS specific vars
+# 04/21/94 (seiwald) - do RmTemps together
+# 05/05/94 (seiwald) - all supported C compilers support -o: relegate
+# RELOCATE as an option; set Ranlib to "" to disable it
+# 06/01/94 (seiwald) - new 'actions existing' to do existing sources
+# 08/25/94 (seiwald) - new ObjectCcFlags rule to append to per-target CCFLAGS
+# 08/29/94 (seiwald) - new ObjectHdrs rule to append to per-target HDRS
+# 09/19/94 (seiwald) - LinkLibraries and Undefs now append
+# - Rule names downshifted.
+# 10/06/94 (seiwald) - Dumb yyacc stuff moved into Jamfile.
+# 10/14/94 (seiwald) - (Crude) support for .s, .C, .cc, .cpp, and .f files.
+# 01/08/95 (seiwald) - Shell now handled with awk, not sed
+# 01/09/95 (seiwald) - Install* now take dest directory as target
+# 01/10/95 (seiwald) - All entries sorted.
+# 01/10/95 (seiwald) - NT support moved in, with LauraW's help.
+# 01/10/95 (seiwald) - VMS support moved in.
+# 02/06/95 (seiwald) - ObjectC++Flags and SubDirC++Flags added.
+# 02/07/95 (seiwald) - Iron out when HDRSEARCH uses "" or SEARCH_SOURCE.
+# 02/08/95 (seiwald) - SubDir works on VMS.
+# 02/14/95 (seiwald) - MkDir and entourage.
+# 04/30/95 (seiwald) - Use install -c flag so that it copies, not moves.
+# 07/10/95 (taylor) - Support for Microsoft C++.
+# 11/21/96 (peterk) - Support for BeOS
+# 07/19/99 (sickel) - Support for Mac OS X Server (and maybe client)
+# 02/18/00 (belmonte)- Support for Cygwin.
+
+# Special targets defined in this file:
+#
+# all - parent of first, shell, files, lib, exe
+# first - first dependency of 'all', for potential initialization
+# shell - parent of all Shell targets
+# files - parent of all File targets
+# lib - parent of all Library targets
+# exe - parent of all Main targets
+# dirs - parent of all MkDir targets
+# clean - removes all Shell, File, Library, and Main targets
+# uninstall - removes all Install targets
+#
+
+# Rules defined by this file:
+#
+# as obj.o : source.s ; .s -> .o
+# Bulk dir : files ; populate directory with many files
+# Cc obj.o : source.c ; .c -> .o
+# C++ obj.o : source.cc ; .cc -> .o
+# Clean clean : sources ; remove sources with 'jam clean'
+# File dest : source ; copy file
+# Fortran obj.o : source.f ; .f -> .o
+# GenFile source.c : program args ; make custom file
+# Hardlink target : source ; make link from source to target
+# HdrRule source : headers ; handle #includes
+# InstallInto dir : sources ; install any files
+# InstallBin dir : sources ; install binaries
+# InstallLib dir : sources ; install files
+# InstallFile dir : sources ; install files
+# InstallMan dir : sources ; install man pages
+# InstallShell dir : sources ; install shell scripts
+# Lex source.c : source.l ; .l -> .c
+# Library lib : source ; archive library from compiled sources
+# LibraryFromObjects lib : objects ; archive library from objects
+# LinkLibraries images : libraries ; bag libraries onto Mains
+# Main image : source ; link executable from compiled sources
+# MainFromObjects image : objects ; link executable from objects
+# MkDir dir ; make a directory, if not there
+# Object object : source ; compile object from source
+# ObjectCcFlags source : flags ; add compiler flags for object
+# ObjectC++Flags source : flags ; add compiler flags for object
+# ObjectHdrs source : dirs ; add include directories for object
+# Objects sources ; compile sources
+# RmTemps target : sources ; remove temp sources after target made
+# Setuid images ; mark executables Setuid
+# SubDir TOP d1 d2 ... ; start a subdirectory Jamfile
+# SubDirCcFlags flags ; add compiler flags until next SubDir
+# SubDirC++Flags flags ; add compiler flags until next SubDir
+# SubDirHdrs dirs ; add include dirs until next SubDir
+# SubInclude TOP d1 d2 ... ; include a subdirectory Jamfile
+# Shell exe : source ; make a shell executable
+# Undefines images : symbols ; save undef's for linking
+# UserObject object : source ; handle unknown suffixes for Object
+# Yacc source.c : source.y ; .y -> .c
+#
+# Utility rules that have no side effects (not supported):
+#
+# FAppendSuffix f1 f2 ... : $(SUF) ; return $(<) with suffixes
+# FConcat value ... ; return contatenated values
+# FDirName d1 d2 ... ; return path from root to dir
+# FGrist d1 d2 ... ; return d1!d2!...
+# FGristFiles value ; return $(value:G=$(SOURCE_GRIST))
+# FGristSourceFiles value ; return $(value:G=$(SOURCE_GRIST))
+# FRelPath d1 : d2 ; return rel path from d1 to d2
+# FSubDir d1 d2 ... ; return path to root
+#
+
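+# For illustration, a minimal sketch of a Jamfile written against the rules
+# listed above (all target and source names are hypothetical):
+#
+#   Main myprog : main.c ;
+#   Library libutil : util.c helper.c ;
+#   LinkLibraries myprog : libutil ;
+#   InstallBin $(BINDIR) : myprog ;
+#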
+
+# Brief review of the jam language:
+#
+# Statements:
+# rule RULE - statements to process a rule
+# actions RULE - system commands to carry out target update
+#
+# Modifiers on actions:
+# together - multiple instances of same rule on target get executed
+# once with their sources ($(>)) concatenated
+# updated - refers to updated sources ($(>)) only
+# ignore - ignore return status of command
+# quietly - don't trace its execution unless verbose
+# piecemeal - iterate command each time with a small subset of $(>)
+# existing - refers to currently existing sources ($(>)) only
+# bind vars - subject to binding before expanding in actions
+#
+# Special rules:
+# ALWAYS - always build a target
+# DEPENDS - builds the dependency graph
+# ECHO - blurt out targets on stdout
+# EXIT - blurt out targets and exit
+# INCLUDES - marks sources as headers for target (a codependency)
+# NOCARE - don't panic if the target can't be built
+# NOUPDATE - create the target if needed but never update it
+# NOTFILE - ignore the timestamp of the target (it's not a file)
+# TEMPORARY - target need not be present if sources haven't changed
+#
+# Special variables set by jam:
+# $(<) - targets of a rule (to the left of the :)
+# $(>) - sources of a rule (to the right of the :)
+# $(xxx) - true on xxx (UNIX, VMS, NT, OS2, MAC)
+# $(OS) - name of OS - varies wildly
+# $(JAMVERSION) - version number (2.3)
+#
+# Special variables used by jam:
+# SEARCH - where to find something (used during binding and actions)
+# LOCATE - where to plop something not found with SEARCH
+# HDRRULE - rule to call to handle include files
+# HDRSCAN - egrep regex to extract include files
+#
+# Special targets:
+# all - default if none given on command line
+#
+
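+# For illustration, a minimal rule/actions pair in the style reviewed above,
+# where $(<) are the targets and $(>) the sources (the "Copy" name is a
+# hypothetical example; the File rule below works the same way):
+#
+#   rule Copy
+#   {
+#       DEPENDS $(<) : $(>) ;
+#   }
+#   actions Copy
+#   {
+#       $(CP) $(>) $(<)
+#   }
+#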
+# Initialize variables
+#
+
+#
+# OS specific variable settings
+#
+if $(NT)
+{
+ # the list of supported toolsets on Windows NT and Windows 95/98
+ #
+ local SUPPORTED_TOOLSETS = "BORLANDC" "VC7" "VISUALC" "VISUALC16" "INTELC" "WATCOM"
+ "MINGW" "LCC" ;
+
+ # this variable holds the current toolset
+ #
+ TOOLSET = "" ;
+
+ # if the JAM_TOOLSET environment variable is defined, check that it is
+ # one of our supported values
+ #
+ if $(JAM_TOOLSET)
+ {
+ local t ;
+
+ for t in $(SUPPORTED_TOOLSETS)
+ {
+ $(t) = $($(t):J=" ") ; # reconstitute paths with spaces in them
+ if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }
+ }
+
+ if ! $(TOOLSET)
+ {
+ ECHO "The JAM_TOOLSET environment variable is defined but its value" ;
+ ECHO "is invalid, please use one of the following:" ;
+ ECHO ;
+
+ for t in $(SUPPORTED_TOOLSETS) { ECHO " " $(t) ; }
+ EXIT ;
+ }
+ }
+
+ # if TOOLSET is empty, we'll try to detect the toolset from other
+ # environment variables to remain backwards compatible with Jam 2.3
+ #
+ if ! $(TOOLSET)
+ {
+ if $(BCCROOT)
+ {
+ TOOLSET = BORLANDC ;
+ BORLANDC = $(BCCROOT:J=" ") ;
+ }
+ else if $(MSVC)
+ {
+ TOOLSET = VISUALC16 ;
+ VISUALC16 = $(MSVC:J=" ") ;
+ }
+ else if $(MSVCNT)
+ {
+ TOOLSET = VISUALC ;
+ VISUALC = $(MSVCNT:J=" ") ;
+ }
+ else if $(MSVCDir)
+ {
+ TOOLSET = VISUALC ;
+ VISUALC = $(MSVCDir:J=" ") ;
+ }
+ else if $(MINGW)
+ {
+ TOOLSET = MINGW ;
+ }
+ else
+ {
+ ECHO "Jam cannot be run because, either:" ;
+ ECHO " a. You didn't set BOOST_ROOT to indicate the root of your" ;
+ ECHO " Boost installation." ;
+ ECHO " b. You are trying to use stock Jam but didn't indicate which" ;
+ ECHO " compilation toolset to use. To do so, follow these simple" ;
+ ECHO " instructions:" ;
+ ECHO ;
+ ECHO " - define one of the following environment variables, with the" ;
+ ECHO " appropriate value according to this list:" ;
+ ECHO ;
+ ECHO " Variable Toolset Description" ;
+ ECHO ;
+ ECHO " BORLANDC Borland C++ BC++ install path" ;
+ ECHO " VISUALC Microsoft Visual C++ VC++ install path" ;
+ ECHO " VISUALC16 Microsoft Visual C++ 16 bit VC++ 16 bit install" ;
+ ECHO " INTELC Intel C/C++ IC++ install path" ;
+ ECHO " WATCOM Watcom C/C++ Watcom install path" ;
+ ECHO " MINGW MinGW (gcc) MinGW install path" ;
+ ECHO " LCC Win32-LCC LCC-Win32 install path" ;
+ ECHO ;
+ ECHO " - define the JAM_TOOLSET environment variable with the *name*" ;
+ ECHO " of the toolset variable you want to use." ;
+ ECHO ;
+ ECHO " e.g.: set VISUALC=C:\\Visual6" ;
+ ECHO " set JAM_TOOLSET=VISUALC" ;
+ EXIT ;
+ }
+ }
+
+ CP ?= copy ;
+ RM ?= del /f/q ;
+ SLASH ?= \\ ;
+ SUFLIB ?= .lib ;
+ SUFOBJ ?= .obj ;
+ SUFEXE ?= .exe ;
+
+ if $(TOOLSET) = BORLANDC
+ {
+ ECHO "Compiler is Borland C++" ;
+
+ AR ?= tlib /C /P64 ;
+ CC ?= bcc32 ;
+ CCFLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus ;
+ C++ ?= bcc32 ;
+ C++FLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus -P ;
+ LINK ?= $(CC) ;
+ LINKFLAGS ?= $(CCFLAGS) ;
+ STDLIBPATH ?= $(BORLANDC)\\lib ;
+ STDHDRS ?= $(BORLANDC)\\include ;
+ NOARSCAN ?= true ;
+ }
+ else if $(TOOLSET) = VISUALC16
+ {
+ ECHO "Compiler is Microsoft Visual C++ 16 bit" ;
+
+ AR ?= lib /nologo ;
+ CC ?= cl /nologo ;
+ CCFLAGS ?= /D \"WIN\" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= $(CC) ;
+ LINKFLAGS ?= $(CCFLAGS) ;
+ LINKLIBS ?=
+ \"$(VISUALC16)\\lib\\mlibce.lib\"
+ \"$(VISUALC16)\\lib\\oldnames.lib\"
+ ;
+ LINKLIBS ?= ;
+ NOARSCAN ?= true ;
+ OPTIM ?= "" ;
+ STDHDRS ?= $(VISUALC16)\\include ;
+ UNDEFFLAG ?= "/u _" ;
+ }
+ else if $(TOOLSET) = VISUALC
+ {
+ ECHO "Compiler is Microsoft Visual C++" ;
+
+ AR ?= lib ;
+ AS ?= masm386 ;
+ CC ?= cl /nologo ;
+ CCFLAGS ?= "" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= link /nologo ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= \"$(VISUALC)\\lib\\advapi32.lib\"
+ # $(VISUALC)\\lib\\libc.lib
+ # $(VISUALC)\\lib\\oldnames.lib
+ \"$(VISUALC)\\lib\\gdi32.lib\"
+ \"$(VISUALC)\\lib\\user32.lib\"
+ \"$(VISUALC)\\lib\\kernel32.lib\" ;
+ OPTIM ?= "" ;
+ STDHDRS ?= $(VISUALC)\\include ;
+ UNDEFFLAG ?= "/u _" ;
+ }
+ else if $(TOOLSET) = VC7
+ {
+ ECHO "Compiler is Microsoft Visual C++ .NET" ;
+
+ AR ?= lib ;
+ AS ?= masm386 ;
+ CC ?= cl /nologo ;
+ CCFLAGS ?= "" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= link /nologo ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= \"$(VISUALC)\\PlatformSDK\\lib\\advapi32.lib\"
+ # $(VISUALC)\\lib\\libc.lib
+ # $(VISUALC)\\lib\\oldnames.lib
+ \"$(VISUALC)\\PlatformSDK\\lib\\gdi32.lib\"
+ \"$(VISUALC)\\PlatformSDK\\lib\\user32.lib\"
+ \"$(VISUALC)\\PlatformSDK\\lib\\kernel32.lib\" ;
+ OPTIM ?= "" ;
+ STDHDRS ?= \"$(VISUALC)\\include\"
+ \"$(VISUALC)\\PlatformSDK\\include\" ;
+ UNDEFFLAG ?= "/u _" ;
+ }
+ else if $(TOOLSET) = INTELC
+ {
+ ECHO "Compiler is Intel C/C++" ;
+
+ if ! $(VISUALC)
+ {
+ ECHO "As a special exception, when using the Intel C++ compiler, you need" ;
+ ECHO "to define the VISUALC environment variable to indicate the location" ;
+ ECHO "of your Visual C++ installation. Aborting.." ;
+ EXIT ;
+ }
+
+ AR ?= lib ;
+ AS ?= masm386 ;
+ CC ?= icl /nologo ;
+ CCFLAGS ?= "" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= link /nologo ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= $(VISUALC)\\lib\\advapi32.lib
+ # $(VISUALC)\\lib\\libc.lib
+ # $(VISUALC)\\lib\\oldnames.lib
+ $(VISUALC)\\lib\\kernel32.lib
+ ;
+ OPTIM ?= "" ;
+ STDHDRS ?= $(INTELC)\include $(VISUALC)\\include ;
+ UNDEFFLAG ?= "/u _" ;
+ }
+ else if $(TOOLSET) = WATCOM
+ {
+ ECHO "Compiler is Watcom C/C++" ;
+
+ AR ?= wlib ;
+ CC ?= wcc386 ;
+ CCFLAGS ?= /zq /DWIN32 /I$(WATCOM)\\h ; # zq=quiet
+ C++ ?= wpp386 ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ CP ?= copy ;
+ DOT ?= . ;
+ DOTDOT ?= .. ;
+ LINK ?= wcl386 ;
+ LINKFLAGS ?= /zq ; # zq=quiet
+ LINKLIBS ?= ;
+ MV ?= move ;
+ NOARSCAN ?= true ;
+ OPTIM ?= ;
+ RM ?= del /f ;
+ SLASH ?= \\ ;
+ STDHDRS ?= $(WATCOM)\\h $(WATCOM)\\h\\nt ;
+ SUFEXE ?= .exe ;
+ SUFLIB ?= .lib ;
+ SUFOBJ ?= .obj ;
+ UNDEFFLAG ?= "/u _" ;
+ }
+ else if $(TOOLSET) = MINGW
+ {
+ ECHO "Compiler is GCC with Mingw" ;
+
+ AR ?= ar -ru ;
+ CC ?= gcc ;
+ CCFLAGS ?= "" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= $(CC) ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= "" ;
+ OPTIM ?= ;
+ SUFOBJ = .o ;
+ SUFLIB = .a ;
+ SLASH = / ;
+# NOARSCAN ?= true ;
+ }
+ else if $(TOOLSET) = LCC
+ {
+ ECHO "Compiler is Win32-LCC" ;
+
+ AR ?= lcclib ;
+ CC ?= lcc ;
+ CCFLAGS ?= "" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= lcclnk ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= "" ;
+ OPTIM ?= ;
+ NOARSCAN = true ;
+ }
+ else
+ {
+#
+# XXX: We need better comments here !!
+#
+ EXIT On NT, set BCCROOT, MSVCNT, MINGW or MSVC to the root of the
+ Borland or Microsoft directories. ;
+ }
+
+}
+else if $(OS2)
+{
+ # the list of supported toolsets on OS/2
+ #
+ local SUPPORTED_TOOLSETS = "EMX" "WATCOM" ;
+
+ # this variable holds the current toolset
+ #
+ TOOLSET = "" ;
+
+ # if the JAM_TOOLSET environment variable is defined, check that it is
+ # one of our supported values
+ #
+ if $(JAM_TOOLSET)
+ {
+ local t ;
+
+ for t in $(SUPPORTED_TOOLSETS)
+ {
+ $(t) = $($(t):J=" ") ; # reconstitute paths with spaces in them
+ if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }
+ }
+
+ if ! $(TOOLSET)
+ {
+ ECHO "The JAM_TOOLSET environment variable is defined but its value" ;
+ ECHO "is invalid, please use one of the following:" ;
+ ECHO ;
+
+ for t in $(SUPPORTED_TOOLSETS) { ECHO " " $(t) ; }
+ EXIT ;
+ }
+ }
+
+ # if TOOLSET is empty, we'll try to detect the toolset from other
+ # environment variables to remain backwards compatible with Jam 2.3
+ #
+ if ! $(TOOLSET)
+ {
+ if $(watcom)
+ {
+ WATCOM = $(watcom:J=" ") ;
+ TOOLSET = WATCOM ;
+ }
+ else
+ {
+ ECHO "Jam cannot be run because you didn't indicate which compilation toolset" ;
+ ECHO "to use. To do so, follow these simple instructions:" ;
+ ECHO ;
+ ECHO " - define one of the following environment variables, with the" ;
+ ECHO " appropriate value according to this list:" ;
+ ECHO ;
+ ECHO " Variable Toolset Description" ;
+ ECHO ;
+ ECHO " WATCOM Watcom C/C++ Watcom install path" ;
+ ECHO " EMX EMX (gcc) EMX install path" ;
+ ECHO " VISUALAGE IBM Visual Age C/C++ VisualAge install path" ;
+ ECHO ;
+ ECHO " - define the JAM_TOOLSET environment variable with the *name*" ;
+ ECHO " of the toolset variable you want to use." ;
+ ECHO ;
+ ECHO " e.g.: set WATCOM=C:\WATCOM" ;
+ ECHO " set JAM_TOOLSET=WATCOM" ;
+ ECHO ;
+ EXIT ;
+ }
+ }
+
+ RM = del /f ;
+ CP = copy ;
+ MV ?= move ;
+ DOT ?= . ;
+ DOTDOT ?= .. ;
+ SUFLIB ?= .lib ;
+ SUFOBJ ?= .obj ;
+ SUFEXE ?= .exe ;
+
+ if $(TOOLSET) = WATCOM
+ {
+ AR ?= wlib ;
+ BINDIR ?= \\os2\\apps ;
+ CC ?= wcc386 ;
+ CCFLAGS ?= /zq /DOS2 /I$(WATCOM)\\h ; # zq=quiet
+ C++ ?= wpp386 ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= wcl386 ;
+ LINKFLAGS ?= /zq ; # zq=quiet
+ LINKLIBS ?= ;
+ NOARSCAN ?= true ;
+ OPTIM ?= ;
+ SLASH ?= \\ ;
+ STDHDRS ?= $(WATCOM)\\h ;
+ UNDEFFLAG ?= "/u _" ;
+ }
+ else if $(TOOLSET) = EMX
+ {
+ ECHO "Compiler is GCC-EMX" ;
+ AR ?= ar -ru ;
+ CC ?= gcc ;
+ CCFLAGS ?= "" ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ LINK ?= $(CC) ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= "" ;
+ OPTIM ?= ;
+ SUFOBJ = .o ;
+ SUFLIB = .a ;
+ UNDEFFLAG ?= "-U" ;
+ SLASH = / ;
+# NOARSCAN ?= true ;
+ }
+ else
+ {
+ # should never happen
+ EXIT "Sorry, but the $(JAM_TOOLSET) toolset isn't supported for now" ;
+ }
+}
+else if $(VMS)
+{
+ C++ ?= cxx ;
+ C++FLAGS ?= ;
+ CC ?= cc ;
+ CCFLAGS ?= ;
+ CHMOD ?= set file/prot= ;
+ CP ?= copy/replace ;
+ CRELIB ?= true ;
+ DOT ?= [] ;
+ DOTDOT ?= [-] ;
+ EXEMODE ?= (w:e) ;
+ FILEMODE ?= (w:r) ;
+ HDRS ?= ;
+ LINK ?= link ;
+ LINKFLAGS ?= "" ;
+ LINKLIBS ?= ;
+ MKDIR ?= create/dir ;
+ MV ?= rename ;
+ OPTIM ?= "" ;
+ RM ?= delete ;
+ RUNVMS ?= mcr ;
+ SHELLMODE ?= (w:er) ;
+ SLASH ?= . ;
+ STDHDRS ?= decc$library_include ;
+ SUFEXE ?= .exe ;
+ SUFLIB ?= .olb ;
+ SUFOBJ ?= .obj ;
+
+ switch $(OS)
+ {
+ case OPENVMS : CCFLAGS ?= /stand=vaxc ;
+ case VMS : LINKLIBS ?= sys$library:vaxcrtl.olb/lib ;
+ }
+}
+else if $(MAC)
+{
+ local OPT ;
+
+ CW ?= "{CW}" ;
+
+ MACHDRS ?=
+ "$(UMACHDRS):Universal:Interfaces:CIncludes"
+ "$(CW):MSL:MSL_C:MSL_Common:Include"
+ "$(CW):MSL:MSL_C:MSL_MacOS:Include" ;
+
+ MACLIBS ?=
+ "$(CW):MacOS Support:Universal:Libraries:StubLibraries:Interfacelib"
+ "$(CW):MacOS Support:Universal:Libraries:StubLibraries:Mathlib" ;
+
+ MPWLIBS ?=
+ "$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib"
+ "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW.Lib" ;
+
+ MPWNLLIBS ?=
+ "$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib"
+ "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW(NL).Lib" ;
+
+ SIOUXHDRS ?= ;
+
+ SIOUXLIBS ?=
+ "$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL RuntimePPC.lib"
+ "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL SIOUX.PPC.Lib"
+ "$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC.Lib" ;
+
+ C++ ?= mwcppc ;
+ C++FLAGS ?= -w off -nomapcr ;
+ CC ?= mwcppc ;
+ CCFLAGS ?= -w off -nomapcr ;
+ CP ?= duplicate -y ;
+ DOT ?= ":" ;
+ DOTDOT ?= "::" ;
+ HDRS ?= $(MACHDRS) $(MPWHDRS) ;
+ LINK ?= mwlinkppc ;
+ LINKFLAGS ?= -mpwtool -warn ;
+ LINKLIBS ?= $(MACLIBS) $(MPWLIBS) ;
+ MKDIR ?= newfolder ;
+ MV ?= rename -y ;
+ NOARSCAN ?= true ;
+ OPTIM ?= ;
+ RM ?= delete -y ;
+ SLASH ?= ":" ;
+ STDHDRS ?= ;
+ SUFLIB ?= .lib ;
+ SUFOBJ ?= .o ;
+}
+else if $(OS) = BEOS && $(METROWERKS)
+{
+ AR ?= mwld -xml -o ;
+ BINDIR ?= /boot/apps ;
+ CC ?= mwcc ;
+ CCFLAGS ?= -nosyspath ;
+ C++ ?= $(CC) ;
+ C++FLAGS ?= -nosyspath ;
+ FORTRAN ?= "" ;
+ LIBDIR ?= /boot/develop/libraries ;
+ LINK ?= mwld ;
+ LINKFLAGS ?= "" ;
+ MANDIR ?= /boot/documentation/"Shell Tools"/HTML ;
+ NOARSCAN ?= true ;
+ STDHDRS ?= /boot/develop/headers/posix ;
+}
+else if $(OS) = BEOS
+{
+ BINDIR ?= /boot/apps ;
+ CC ?= gcc ;
+ C++ ?= $(CC) ;
+ FORTRAN ?= "" ;
+ LIBDIR ?= /boot/develop/libraries ;
+ LINK ?= gcc ;
+ LINKLIBS ?= -lnet ;
+ NOARSCAN ?= true ;
+ STDHDRS ?= /boot/develop/headers/posix ;
+}
+else if $(UNIX)
+{
+ switch $(OS)
+ {
+ case AIX :
+ LINKLIBS ?= -lbsd ;
+
+ case AMIGA :
+ CC ?= gcc ;
+ YACC ?= "bison -y" ;
+
+ case CYGWIN :
+ CC ?= gcc ;
+ CCFLAGS += -D__cygwin__ ;
+ LEX ?= flex ;
+ RANLIB ?= "" ;
+ SUFEXE ?= .exe ;
+ YACC ?= "bison -y" ;
+
+ case DGUX :
+ RANLIB ?= "" ;
+ RELOCATE ?= true ;
+
+ case HPUX :
+ YACC = ;
+ CFLAGS += -Ae ;
+ CCFLAGS += -Ae ;
+ RANLIB ?= "" ;
+
+ case INTERIX :
+ CC ?= gcc ;
+ RANLIB ?= "" ;
+
+ case IRIX :
+ RANLIB ?= "" ;
+
+ case MPEIX :
+ CC ?= gcc ;
+ C++ ?= gcc ;
+ CCFLAGS += -D_POSIX_SOURCE ;
+ HDRS += /usr/include ;
+ RANLIB ?= "" ;
+ NOARSCAN ?= true ;
+ NOARUPDATE ?= true ;
+
+ case MVS :
+ RANLIB ?= "" ;
+
+ case NEXT :
+ AR ?= libtool -o ;
+ RANLIB ?= "" ;
+
+ case MACOSX :
+ AR ?= libtool -o ;
+ C++ ?= c++ ;
+ MANDIR ?= /usr/local/share/man ;
+ RANLIB ?= "" ;
+
+ case NCR :
+ RANLIB ?= "" ;
+
+ case PTX :
+ RANLIB ?= "" ;
+
+ case QNX :
+ AR ?= wlib ;
+ CC ?= cc ;
+ CCFLAGS ?= -Q ; # quiet
+ C++ ?= $(CC) ;
+ C++FLAGS ?= -Q ; # quiet
+ LINK ?= $(CC) ;
+ LINKFLAGS ?= -Q ; # quiet
+ NOARSCAN ?= true ;
+ RANLIB ?= "" ;
+
+ case SCO :
+ RANLIB ?= "" ;
+ RELOCATE ?= true ;
+
+ case SINIX :
+ RANLIB ?= "" ;
+
+ case SOLARIS :
+ RANLIB ?= "" ;
+ AR ?= "/usr/ccs/bin/ar ru" ;
+
+ case UNICOS :
+ NOARSCAN ?= true ;
+ OPTIM ?= -O0 ;
+
+ case UNIXWARE :
+ RANLIB ?= "" ;
+ RELOCATE ?= true ;
+ }
+
+ # UNIX defaults
+
+ CCFLAGS ?= ;
+ C++FLAGS ?= $(CCFLAGS) ;
+ CHMOD ?= chmod ;
+ CHGRP ?= chgrp ;
+ CHOWN ?= chown ;
+ LEX ?= lex ;
+ LINKFLAGS ?= $(CCFLAGS) ;
+ LINKLIBS ?= ;
+ OPTIM ?= -O ;
+ RANLIB ?= ranlib ;
+ YACC ?= yacc ;
+ YACCFILES ?= y.tab ;
+ YACCFLAGS ?= -d ;
+}
+
+#
+# General defaults; a lot like UNIX
+#
+
+ AR ?= ar ru ;
+ AS ?= as ;
+ ASFLAGS ?= ;
+ AWK ?= awk ;
+ BINDIR ?= /usr/local/bin ;
+ C++ ?= cc ;
+ C++FLAGS ?= ;
+ CC ?= cc ;
+ CCFLAGS ?= ;
+ CP ?= cp -f ;
+ CRELIB ?= ;
+ DOT ?= . ;
+ DOTDOT ?= .. ;
+ EXEMODE ?= 711 ;
+ FILEMODE ?= 644 ;
+ FORTRAN ?= f77 ;
+ FORTRANFLAGS ?= ;
+ HDRS ?= ;
+ INSTALLGRIST ?= installed ;
+ JAMFILE ?= Jamfile ;
+ JAMRULES ?= Jamrules ;
+ LEX ?= ;
+ LIBDIR ?= /usr/local/lib ;
+ LINK ?= $(CC) ;
+ LINKFLAGS ?= ;
+ LINKLIBS ?= ;
+ LN ?= ln ;
+ MANDIR ?= /usr/local/man ;
+ MKDIR ?= mkdir ;
+ MV ?= mv -f ;
+ OPTIM ?= ;
+ RCP ?= rcp ;
+ RM ?= rm -f ;
+ RSH ?= rsh ;
+ SED ?= sed ;
+ SHELLHEADER ?= "#!/bin/sh" ;
+ SHELLMODE ?= 755 ;
+ SLASH ?= / ;
+ STDHDRS ?= /usr/include ;
+ SUFEXE ?= "" ;
+ SUFLIB ?= .a ;
+ SUFOBJ ?= .o ;
+ UNDEFFLAG ?= "-u _" ;
+ YACC ?= ;
+ YACCFILES ?= ;
+ YACCFLAGS ?= ;
+
+ HDRPATTERN =
+ "^[ ]*#[ ]*include[ ]*[<\"]([^\">]*)[\">].*$" ;
+
+ OSFULL = $(OS)$(OSVER)$(OSPLAT) $(OS)$(OSPLAT) $(OS)$(OSVER) $(OS) ;
+
+
+#
+# Base dependencies - first for "bootstrap" kinds of rules
+#
+
+DEPENDS all : shell files lib exe obj ;
+DEPENDS all shell files lib exe obj : first ;
+NOTFILE all first shell files lib exe obj dirs clean uninstall ;
+ALWAYS clean uninstall ;
+
+#
+# Rules
+#
+
+rule As
+{
+ DEPENDS $(<) : $(>) ;
+ ASFLAGS on $(<) += $(ASFLAGS) $(SUBDIRASFLAGS) ;
+}
+
+rule Bulk
+{
+ local i ;
+
+ for i in $(>)
+ {
+ File $(i:D=$(<)) : $(i) ;
+ }
+}
+
+rule Cc
+{
+ local _h ;
+
+ DEPENDS $(<) : $(>) ;
+
+ # Just to clarify here: this sets the per-target CCFLAGS to
+ # be the current value of (global) CCFLAGS and SUBDIRCCFLAGS.
+
+ CCFLAGS on $(<) += $(CCFLAGS) $(SUBDIRCCFLAGS) ;
+
+ # If the compiler's -o flag doesn't work, relocate the .o
+
+ if $(RELOCATE)
+ {
+ CcMv $(<) : $(>) ;
+ }
+
+ _h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;
+
+ if $(VMS) && $(_h)
+ {
+ SLASHINC on $(<) = "/inc=(" $(_h[1]) ,$(_h[2-]) ")" ;
+ }
+ else if $(MAC) && $(_h)
+ {
+ local _i _j ;
+ _j = $(_h[1]) ;
+ for _i in $(_h[2-])
+ {
+ _j = $(_j),$(_i) ;
+ }
+ MACINC on $(<) = \"$(_j)\" ;
+ }
+}
+
+rule C++
+{
+ local _h ;
+
+ DEPENDS $(<) : $(>) ;
+ C++FLAGS on $(<) += $(C++FLAGS) $(SUBDIRC++FLAGS) ;
+
+ if $(RELOCATE)
+ {
+ CcMv $(<) : $(>) ;
+ }
+
+ _h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;
+
+ if $(VMS) && $(_h)
+ {
+ SLASHINC on $(<) = "/inc=(" $(_h[1]) ,$(_h[2-]) ")" ;
+ }
+ else if $(MAC) && $(_h)
+ {
+ local _i _j ;
+ _j = $(_h[1]) ;
+ for _i in $(_h[2-])
+ {
+ _j = $(_j),$(_i) ;
+ }
+ MACINC on $(<) = \"$(_j)\" ;
+ }
+}
+
+rule Chmod
+{
+ if $(CHMOD) { Chmod1 $(<) ; }
+}
+
+rule File
+{
+ DEPENDS files : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ SEARCH on $(>) = $(SEARCH_SOURCE) ;
+ MODE on $(<) = $(FILEMODE) ;
+ Chmod $(<) ;
+}
+
+rule Fortran
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+rule GenFile
+{
+ local _t = [ FGristSourceFiles $(<) ] ;
+ local _s = [ FAppendSuffix $(>[1]) : $(SUFEXE) ] ;
+ Depends $(_t) : $(_s) $(>[2-]) ;
+ GenFile1 $(_t) : $(_s) $(>[2-]) ;
+ Clean clean : $(_t) ;
+}
+
+rule GenFile1
+{
+ MakeLocate $(<) : $(LOCATE_SOURCE) ;
+ SEARCH on $(>) = $(SEARCH_SOURCE) ;
+}
+
+rule HardLink
+{
+ DEPENDS files : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ SEARCH on $(>) = $(SEARCH_SOURCE) ;
+}
+
+rule HdrMacroFile
+{
+ # HdrMacroFile file ;
+ #
+ # this rule is used to indicate that a given file contains definitions
+ # for filename macros (e.g. "#define MYFILE_H <myfile.h>") that can
+ # later be used in #include statements in the rest of the source
+ #
+ # these files must be parsed before any make is tried.
+ #
+ HDRMACRO $(<) ;
+}
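+
+# For illustration, a header that only defines filename macros, e.g. a
+# hypothetical "macros.h" containing '#define MYFILE_H <myfile.h>', would be
+# registered with the HdrMacroFile rule above as
+#
+#   HdrMacroFile macros.h ;
+#
+# so that later '#include MYFILE_H' lines can be resolved during scanning.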
+
+rule HdrRule
+{
+ # HdrRule source : headers ;
+
+ # N.B. This rule is called during binding, potentially after
+ # the fate of many targets has been determined, and must be
+ # used with caution: don't add dependencies to unrelated
+ # targets, and don't set variables on $(<).
+
+ # Tell Jam that anything depending on $(<) also depends on $(>),
+ # set SEARCH so Jam can find the headers, but then say we don't
+ # care if we can't actually find the headers (they may have been
+ # within ifdefs),
+
+ local s ;
+
+ if $(HDRGRIST)
+ {
+ s = $(>:G=$(HDRGRIST)) ;
+ } else {
+ s = $(>) ;
+ }
+
+ INCLUDES $(<) : $(s) ;
+ SEARCH on $(s) = $(HDRSEARCH) ;
+ NOCARE $(s) ;
+
+ # Propagate on $(<) to $(>)
+
+ HDRSEARCH on $(s) = $(HDRSEARCH) ;
+ HDRSCAN on $(s) = $(HDRSCAN) ;
+ HDRRULE on $(s) = $(HDRRULE) ;
+ HDRGRIST on $(s) = $(HDRGRIST) ;
+}
+
+rule InstallInto
+{
+ # InstallInto dir : sources ;
+
+ local i t ;
+
+ t = $(>:G=$(INSTALLGRIST)) ;
+
+ # Arrange for jam install
+ # Arrange for jam uninstall
+ # sources are in SEARCH_SOURCE
+ # targets are in dir
+
+ Depends install : $(t) ;
+ Clean uninstall : $(t) ;
+ SEARCH on $(>) = $(SEARCH_SOURCE) ;
+ MakeLocate $(t) : $(<) ;
+
+ # For each source, make gristed target name
+ # and Install, Chmod, Chown, and Chgrp
+
+ for i in $(>)
+ {
+ local tt = $(i:G=$(INSTALLGRIST)) ;
+
+ Depends $(tt) : $(i) ;
+ Install $(tt) : $(i) ;
+ Chmod $(tt) ;
+
+ if $(OWNER) && $(CHOWN)
+ {
+ Chown $(tt) ;
+ OWNER on $(tt) = $(OWNER) ;
+ }
+
+ if $(GROUP) && $(CHGRP)
+ {
+ Chgrp $(tt) ;
+ GROUP on $(tt) = $(GROUP) ;
+ }
+ }
+}
+
+rule InstallBin
+{
+ local _t = [ FAppendSuffix $(>) : $(SUFEXE) ] ;
+
+ InstallInto $(<) : $(_t) ;
+ MODE on $(_t:G=installed) = $(EXEMODE) ;
+}
+
+rule InstallFile
+{
+ InstallInto $(<) : $(>) ;
+ MODE on $(>:G=installed) = $(FILEMODE) ;
+}
+
+rule InstallLib
+{
+ InstallInto $(<) : $(>) ;
+ MODE on $(>:G=installed) = $(FILEMODE) ;
+}
+
+rule InstallMan
+{
+ # Really this just strips the . from the suffix
+
+ local i s d ;
+
+ for i in $(>)
+ {
+ switch $(i:S)
+ {
+ case .1 : s = 1 ; case .2 : s = 2 ; case .3 : s = 3 ;
+ case .4 : s = 4 ; case .5 : s = 5 ; case .6 : s = 6 ;
+ case .7 : s = 7 ; case .8 : s = 8 ; case .l : s = l ;
+ case .n : s = n ; case .man : s = 1 ;
+ }
+
+ d = man$(s) ;
+
+ InstallInto $(d:R=$(<)) : $(i) ;
+ }
+
+ MODE on $(>:G=installed) = $(FILEMODE) ;
+}
+
+rule InstallShell
+{
+ InstallInto $(<) : $(>) ;
+ MODE on $(>:G=installed) = $(SHELLMODE) ;
+}
+
+rule Lex
+{
+ LexMv $(<) : $(>) ;
+ DEPENDS $(<) : $(>) ;
+ MakeLocate $(<) : $(LOCATE_SOURCE) ;
+ Clean clean : $(<) ;
+}
+
+rule Library
+{
+ LibraryFromObjects $(<) : $(>:S=$(SUFOBJ)) ;
+ Objects $(>) ;
+}
+
+rule LibraryFromObjects
+{
+ local _i _l _s ;
+
+ # Add grist to file names
+
+ _s = [ FGristFiles $(>) ] ;
+ _l = $(<:S=$(SUFLIB)) ;
+
+ # library depends on its member objects
+
+ if $(KEEPOBJS)
+ {
+ DEPENDS obj : $(_s) ;
+ }
+ else
+ {
+ DEPENDS lib : $(_l) ;
+ }
+
+ # Set LOCATE for the library and its contents. The bound
+ # value shows up as $(NEEDLIBS) on the Link actions.
+ # For compatibility, we only do this if the library doesn't
+ # already have a path.
+
+ if ! $(_l:D)
+ {
+ MakeLocate $(_l) $(_l)($(_s:BS)) : $(LOCATE_TARGET) ;
+ }
+
+ if $(NOARSCAN)
+ {
+ # If we can't scan the library to timestamp its contents,
+ # we have to just make the library depend directly on the
+ # on-disk object files.
+
+ DEPENDS $(_l) : $(_s) ;
+ }
+ else
+ {
+ # If we can scan the library, we make the library depend
+ # on its members and each member depend on the on-disk
+ # object file.
+
+ DEPENDS $(_l) : $(_l)($(_s:BS)) ;
+
+ for _i in $(_s)
+ {
+ DEPENDS $(_l)($(_i:BS)) : $(_i) ;
+ }
+ }
+
+ Clean clean : $(_l) ;
+
+ if $(CRELIB) { CreLib $(_l) : $(_s[1]) ; }
+
+ Archive $(_l) : $(_s) ;
+
+ if $(RANLIB) { Ranlib $(_l) ; }
+
+ # If we can't scan the library, we have to leave the .o's around.
+
+ if ! ( $(NOARSCAN) || $(KEEPOBJS) ) { RmTemps $(_l) : $(_s) ; }
+}
+
+rule Link
+{
+ MODE on $(<) = $(EXEMODE) ;
+ Chmod $(<) ;
+}
+
+rule LinkLibraries
+{
+ # make library dependencies of target
+ # set NEEDLIBS variable used by 'actions Main'
+
+ local _t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;
+
+ DEPENDS $(_t) : $(>:S=$(SUFLIB)) ;
+ NEEDLIBS on $(_t) += $(>:S=$(SUFLIB)) ;
+}
+
+rule Main
+{
+ MainFromObjects $(<) : $(>:S=$(SUFOBJ)) ;
+ Objects $(>) ;
+}
+
+rule MainFromObjects
+{
+ local _s _t ;
+
+ # Add grist to file names
+ # Add suffix to exe
+
+ _s = [ FGristFiles $(>) ] ;
+ _t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;
+
+ if $(_t) != $(<)
+ {
+ DEPENDS $(<) : $(_t) ;
+ NOTFILE $(<) ;
+ }
+
+ # make compiled sources a dependency of target
+
+ DEPENDS exe : $(_t) ;
+ DEPENDS $(_t) : $(_s) ;
+ MakeLocate $(_t) : $(LOCATE_TARGET) ;
+
+ Clean clean : $(_t) ;
+
+ Link $(_t) : $(_s) ;
+}
+
+rule MakeLocate
+{
+ if $(>)
+ {
+ LOCATE on $(<) = $(>) ;
+ Depends $(<) : $(>[1]) ;
+ MkDir $(>[1]) ;
+ }
+}
+
+rule MkDir
+{
+ # If dir exists, don't update it
+ # Do this even for $(DOT).
+
+ NOUPDATE $(<) ;
+
+ if $(<) != $(DOT) && ! $($(<)-mkdir)
+ {
+ local s ;
+
+ # Cheesy gate to prevent multiple invocations on same dir
+ # MkDir1 has the actions
+ # Arrange for jam dirs
+
+ $(<)-mkdir = true ;
+ MkDir1 $(<) ;
+ Depends dirs : $(<) ;
+
+ # Recursively make parent directories.
+ # $(<:P) = $(<)'s parent, & we recurse until root
+
+ s = $(<:P) ;
+
+ if $(NT)
+ {
+ switch $(s)
+ {
+ case *: : s = ;
+ case *:\\ : s = ;
+ }
+ }
+
+ if $(s) && $(s) != $(<)
+ {
+ Depends $(<) : $(s) ;
+ MkDir $(s) ;
+ }
+ else if $(s)
+ {
+ NOTFILE $(s) ;
+ }
+
+ }
+}
+
+rule Object
+{
+ local h ;
+
+ # locate object and search for source, if wanted
+
+ Clean clean : $(<) ;
+
+ MakeLocate $(<) : $(LOCATE_TARGET) ;
+ SEARCH on $(>) = $(SEARCH_SOURCE) ;
+
+ # Save HDRS for -I$(HDRS) on compile.
+ # We shouldn't need -I$(SEARCH_SOURCE) as cc can find headers
+ # in the .c file's directory, but generated .c files (from
+ # yacc, lex, etc) are located in $(LOCATE_TARGET), possibly
+ # different from $(SEARCH_SOURCE).
+
+ HDRS on $(<) = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;
+
+ # handle #includes for source: Jam scans for headers with
+ # the regexp pattern $(HDRSCAN) and then invokes $(HDRRULE)
+ # with the scanned file as the target and the found headers
+ # as the sources. HDRSEARCH is the value of SEARCH used for
+ # the found header files. Finally, if jam must deal with
+ # header files of the same name in different directories,
+ # they can be distinguished with HDRGRIST.
+
+ # $(h) is where cc first looks for #include "foo.h" files.
+ # If the source file is in a distant directory, look there.
+ # Else, look in "" (the current directory).
+
+ if $(SEARCH_SOURCE)
+ {
+ h = $(SEARCH_SOURCE) ;
+ }
+ else
+ {
+ h = "" ;
+ }
+
+ HDRRULE on $(>) = HdrRule ;
+ HDRSCAN on $(>) = $(HDRPATTERN) ;
+ HDRSEARCH on $(>) = $(HDRS) $(SUBDIRHDRS) $(h) $(STDHDRS) ;
+ HDRGRIST on $(>) = $(HDRGRIST) ;
+
+ # if source is not .c, generate .c with specific rule
+
+ switch $(>:S)
+ {
+ case .asm : As $(<) : $(>) ;
+ case .c : Cc $(<) : $(>) ;
+ case .C : C++ $(<) : $(>) ;
+ case .cc : C++ $(<) : $(>) ;
+ case .cpp : C++ $(<) : $(>) ;
+ case .f : Fortran $(<) : $(>) ;
+ case .l : Cc $(<) : $(<:S=.c) ;
+ Lex $(<:S=.c) : $(>) ;
+ case .s : As $(<) : $(>) ;
+ case .y : Cc $(<) : $(<:S=.c) ;
+ Yacc $(<:S=.c) : $(>) ;
+ case * : UserObject $(<) : $(>) ;
+ }
+}
+
+
+rule ObjectCcFlags
+{
+ CCFLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;
+}
+
+rule ObjectC++Flags
+{
+ C++FLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;
+}
+
+rule ObjectHdrs
+{
+ HDRS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;
+}
+
+rule Objects
+{
+ local _i ;
+
+ for _i in [ FGristFiles $(<) ]
+ {
+ Object $(_i:S=$(SUFOBJ)) : $(_i) ;
+ DEPENDS obj : $(_i:S=$(SUFOBJ)) ;
+ }
+}
+
+rule RmTemps
+{
+ TEMPORARY $(>) ;
+}
+
+rule Setuid
+{
+ MODE on [ FAppendSuffix $(<) : $(SUFEXE) ] = 4711 ;
+}
+
+rule Shell
+{
+ DEPENDS shell : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ SEARCH on $(>) = $(SEARCH_SOURCE) ;
+ MODE on $(<) = $(SHELLMODE) ;
+ Clean clean : $(<) ;
+ Chmod $(<) ;
+}
+
+rule SubDir
+{
+ local _r _s ;
+
+ #
+ # SubDir TOP d1 [ ... ]
+ #
+ # This introduces a Jamfile that is part of a project tree
+ # rooted at $(TOP). It (only once) includes the project-specific
+ # rules file $(TOP)/Jamrules and then sets search & locate stuff.
+ #
+ # If the variable $(TOPRULES) is set (where TOP is the first arg
+ # to SubDir), that file is included instead of $(TOP)/Jamrules.
+ #
+ # d1 ... are the directory elements that lead to this directory
+ # from $(TOP). We construct the system dependent path from these
+ # directory elements in order to set search & locate stuff.
+ #
+
+ if ! $($(<[1]))
+ {
+ if ! $(<[1])
+ {
+ EXIT SubDir syntax error ;
+ }
+
+ $(<[1]) = [ FSubDir $(<[2-]) ] ;
+ }
+
+ #
+ # If $(TOP)/Jamrules hasn't been included, do so.
+ #
+
+ if ! $($(<[1])-included)
+ {
+ # Gated entry.
+
+ $(<[1])-included = TRUE ;
+
+ # File is $(TOPRULES) or $(TOP)/Jamrules.
+
+ _r = $($(<[1])RULES) ;
+
+ if ! $(_r)
+ {
+ _r = $(JAMRULES:R=$($(<[1]))) ;
+ }
+
+ # Include it.
+
+ include $(_r) ;
+ }
+
+ # Get path to current directory from root using SubDir.
+ # Save dir tokens for other potential uses.
+
+ _s = [ FDirName $(<[2-]) ] ;
+ SUBDIR = $(_s:R=$($(<[1]))) ;
+ SUBDIR_TOKENS = $(<[2-]) ;
+
+ # Now set up SEARCH_SOURCE, LOCATE_TARGET, SOURCE_GRIST
+ # These can be reset if needed. For example, if the source
+ # directory should not hold object files, LOCATE_TARGET can
+ # subsequently be redefined.
+
+ SEARCH_SOURCE = $(SUBDIR) ;
+ LOCATE_SOURCE = $(ALL_LOCATE_TARGET) $(SUBDIR) ;
+ LOCATE_TARGET = $(ALL_LOCATE_TARGET) $(SUBDIR) ;
+ SOURCE_GRIST = [ FGrist $(<[2-]) ] ;
+
+ # Reset per-directory ccflags, hdrs
+
+ SUBDIRCCFLAGS = ;
+ SUBDIRC++FLAGS = ;
+ SUBDIRHDRS = ;
+}
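+
+# For illustration, a Jamfile in $(TOP)/src/lib would normally start with
+#
+#   SubDir TOP src lib ;
+#
+# and the parent Jamfile would pull it in via the SubInclude rule defined
+# further below:
+#
+#   SubInclude TOP src lib ;
+#
+# (the directory names are hypothetical).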
+
+rule SubDirCcFlags
+{
+ SUBDIRCCFLAGS += $(<) ;
+}
+
+rule SubDirC++Flags
+{
+ SUBDIRC++FLAGS += $(<) ;
+}
+
+rule SubDirHdrs
+{
+ SUBDIRHDRS += $(<) ;
+}
+
+rule SubInclude
+{
+ local _s ;
+
+ # That's
+ # SubInclude TOP d1 [ d2 [ d3 [ d4 ] ] ]
+ #
+ # to include a subdirectory's Jamfile.
+
+ if ! $($(<[1]))
+ {
+ EXIT Top level of source tree has not been set with $(<[1]) ;
+ }
+
+ _s = [ FDirName $(<[2-]) ] ;
+
+ include $(JAMFILE:D=$(_s):R=$($(<[1]))) ;
+}
+
+rule Undefines
+{
+ UNDEFS on [ FAppendSuffix $(<) : $(SUFEXE) ] += $(UNDEFFLAG)$(>) ;
+}
+
+rule UserObject
+{
+ EXIT "Unknown suffix on" $(>) "- see UserObject rule in Jamfile(5)." ;
+}
+
+rule Yacc
+{
+ local _h ;
+
+ _h = $(<:BS=.h) ;
+
+ # Some places don't have a yacc.
+
+ MakeLocate $(<) $(_h) : $(LOCATE_SOURCE) ;
+
+ if $(YACC)
+ {
+ DEPENDS $(<) $(_h) : $(>) ;
+ Yacc1 $(<) $(_h) : $(>) ;
+ YaccMv $(<) $(_h) : $(>) ;
+ Clean clean : $(<) $(_h) ;
+ }
+
+ # Make sure someone includes $(_h) else it will be a deadly independent
+ # target.
+ INCLUDES $(<) : $(_h) ;
+}
+
+#
+# Utility rules; no side effects on these.
+#
+
+rule FGrist
+{
+ # Turn individual elements in $(<) into grist.
+
+ local _g _i ;
+
+ _g = $(<[1]) ;
+
+ for _i in $(<[2-])
+ {
+ _g = $(_g)!$(_i) ;
+ }
+
+ return $(_g) ;
+}
+
+rule FGristFiles
+{
+ if ! $(SOURCE_GRIST)
+ {
+ return $(<) ;
+ }
+ else
+ {
+ return $(<:G=$(SOURCE_GRIST)) ;
+ }
+}
+
+rule FGristSourceFiles
+{
+ # Produce source file names with grist in them,
+ # if SOURCE_GRIST is set.
+
+ # Leave header files alone, because they have a global
+ # visibility.
+
+ if ! $(SOURCE_GRIST)
+ {
+ return $(<) ;
+ }
+ else
+ {
+ local _i _o ;
+
+ for _i in $(<)
+ {
+ switch $(_i)
+ {
+ case *.h : _o += $(_i) ;
+ case * : _o += $(_i:G=$(SOURCE_GRIST)) ;
+ }
+ }
+
+ return $(_o) ;
+ }
+}
+
+rule FConcat
+{
+ # Puts the variables together, removing spaces.
+
+ local _t _r ;
+
+ _r = $(<[1]) ;
+
+ for _t in $(<[2-])
+ {
+ _r = $(_r)$(_t) ;
+ }
+
+ return $(_r) ;
+}
+
+rule FSubDir
+{
+ local _i _d ;
+
+ # Given in $(<) the directory elements leading to the current directory,
+ # compute and return the path (using ../.. etc.) back to the root directory.
+
+ if ! $(<[1])
+ {
+ _d = $(DOT) ;
+ }
+ else
+ {
+ _d = $(DOTDOT) ;
+
+ for _i in $(<[2-])
+ {
+ _d = $(_d:R=$(DOTDOT)) ;
+ }
+ }
+
+ return $(_d) ;
+}
+
+rule FDirName
+{
+ local _s _i ;
+
+ # Turn individual elements in $(<) into a usable path.
+
+ if ! $(<)
+ {
+ _s = $(DOT) ;
+ }
+ else if $(VMS)
+ {
+ # This handles the following cases:
+ # a -> [.a]
+ # a b c -> [.a.b.c]
+ # x: -> x:
+ # x: a -> x:[a]
+ # x:[a] b -> x:[a.b]
+
+ switch $(<[1])
+ {
+ case *:* : _s = $(<[1]) ;
+ case \\[*\\] : _s = $(<[1]) ;
+ case * : _s = [.$(<[1])] ;
+ }
+
+ for _i in [.$(<[2-])]
+ {
+ _s = $(_i:R=$(_s)) ;
+ }
+ }
+ else if $(MAC)
+ {
+ _s = $(DOT) ;
+
+ for _i in $(<)
+ {
+ _s = $(_i:R=$(_s)) ;
+ }
+ }
+ else
+ {
+ _s = $(<[1]) ;
+
+ for _i in $(<[2-])
+ {
+ _s = $(_i:R=$(_s)) ;
+ }
+ }
+
+ return $(_s) ;
+}
+
+
+rule _makeCommon
+{
+ # strip common initial elements
+
+ if $($(<)[1]) && $($(<)[1]) = $($(>)[1])
+ {
+ $(<) = $($(<)[2-]) ;
+ $(>) = $($(>)[2-]) ;
+ _makeCommon $(<) : $(>) ;
+ }
+}
+
+
+rule FRelPath
+{
+ local _l _r ;
+
+ # first strip off common parts
+
+ _l = $(<) ;
+ _r = $(>) ;
+
+ _makeCommon _l : _r ;
+
+ # now make path to root and path down
+
+ _l = [ FSubDir $(_l) ] ;
+ _r = [ FDirName $(_r) ] ;
+
+ # Concatenate and save
+
+ # XXX This should be better
+
+ if $(_r) = $(DOT) {
+ return $(_l) ;
+ } else {
+ return $(_r:R=$(_l)) ;
+ }
+}
+
+rule FAppendSuffix
+{
+ # E.g., "FAppendSuffix yacc lex foo.bat : $(SUFEXE) ;"
+ # returns (yacc,lex,foo.bat) on Unix and
+ # (yacc.exe,lex.exe,foo.bat) on NT.
+
+ if $(>)
+ {
+ local _i _o ;
+
+ for _i in $(<)
+ {
+ if $(_i:S)
+ {
+ _o += $(_i) ;
+ }
+ else
+ {
+ _o += $(_i:S=$(>)) ;
+ }
+ }
+ return $(_o) ;
+ }
+ else
+ {
+ return $(<) ;
+ }
+}
+
+rule unmakeDir
+{
+ if $(>[1]:D) && $(>[1]:D) != $(>[1]) && $(>[1]:D) != \\\\
+ {
+ unmakeDir $(<) : $(>[1]:D) $(>[1]:BS) $(>[2-]) ;
+ }
+ else
+ {
+ $(<) = $(>) ;
+ }
+}
+
+
+rule FConvertToSlashes
+{
+ local _d _s _i ;
+
+ unmakeDir _d : $(<) ;
+
+ _s = $(_d[1]) ;
+ for _i in $(_d[2-])
+ {
+ _s = $(_s)/$(_i) ;
+ }
+ return $(_s) ;
+}
+
+
+#
+# Actions
+#
+
+#
+# First the defaults
+#
+
+actions updated together piecemeal Archive
+{
+ $(AR) $(<) $(>)
+}
+
+actions As
+{
+ $(AS) $(ASFLAGS) -I$(HDRS) -o $(<) $(>)
+}
+
+actions C++
+{
+ $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)
+}
+
+actions Cc
+{
+ $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)
+}
+
+actions Chgrp
+{
+ $(CHGRP) $(GROUP) $(<)
+}
+
+actions Chmod1
+{
+ $(CHMOD) $(MODE) $(<)
+}
+
+actions Chown
+{
+ $(CHOWN) $(OWNER) $(<)
+}
+
+actions piecemeal together existing Clean
+{
+ $(RM) $(>)
+}
+
+actions File
+{
+ $(CP) $(>) $(<)
+}
+
+actions GenFile1
+{
+ $(>[1]) $(<) $(>[2-])
+}
+
+actions Fortran
+{
+ $(FORTRAN) $(FORTRANFLAGS) -o $(<) $(>)
+}
+
+actions HardLink
+{
+ $(RM) $(<) && $(LN) $(>) $(<)
+}
+
+actions Install
+{
+ $(CP) $(>) $(<)
+}
+
+actions Lex
+{
+ $(LEX) $(>)
+}
+
+actions LexMv
+{
+ $(MV) lex.yy.c $(<)
+}
+
+actions Link bind NEEDLIBS
+{
+ $(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
+}
+
+actions MkDir1
+{
+ $(MKDIR) $(<)
+}
+
+actions together Ranlib
+{
+ $(RANLIB) $(<)
+}
+
+actions quietly updated piecemeal together RmTemps
+{
+ $(RM) $(>)
+}
+
+actions Shell
+{
+ $(AWK) '
+ NR == 1 { print "$(SHELLHEADER)" }
+ NR == 1 && /^[#:]/ { next }
+ /^##/ { next }
+ { print }
+ ' < $(>) > $(<)
+}
+
+actions Yacc1
+{
+ $(YACC) $(YACCFLAGS) $(>)
+}
+
+actions YaccMv
+{
+ $(MV) $(YACCFILES).c $(<[1])
+ $(MV) $(YACCFILES).h $(<[2])
+}
+
+#
+# RELOCATE - for compilers with broken -o flags
+#
+
+if $(RELOCATE)
+{
+ actions C++
+ {
+ $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) $(>)
+ }
+
+ actions Cc
+ {
+ $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) $(>)
+ }
+
+ actions ignore CcMv
+ {
+ [ $(<) != $(>:BS=$(SUFOBJ)) ] && $(MV) $(>:BS=$(SUFOBJ)) $(<)
+ }
+}
+
+#
+# NOARUPDATE - can't update an archive
+#
+
+if $(NOARUPDATE)
+{
+ actions Archive
+ {
+ $(AR) $(<) $(>)
+ }
+}
+
+#
+# NT specific actions
+#
+
+if $(NT)
+{
+ if $(TOOLSET) = VISUALC || $(TOOLSET) = VC7 || $(TOOLSET) = INTELC
+ {
+ actions updated together piecemeal Archive
+ {
+ if exist $(<) set _$(<:B)_=$(<)
+ $(AR) /out:$(<) %_$(<:B)_% $(>)
+ }
+
+ actions As
+ {
+ $(AS) /Ml /p /v /w2 $(>) $(<) ,nul,nul;
+ }
+
+ actions Cc
+ {
+ $(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) /Tp$(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
+ }
+ }
+ else if $(TOOLSET) = VISUALC16
+ {
+ actions updated together piecemeal Archive
+ {
+ $(AR) $(<) -+$(>)
+ }
+
+ actions Cc
+ {
+ $(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /Tp$(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
+ }
+ }
+ else if $(TOOLSET) = BORLANDC
+ {
+ actions updated together piecemeal Archive
+ {
+ $(AR) $(<) -+$(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) -e$(<) $(LINKFLAGS) $(UNDEFS) -L$(LINKLIBS) $(NEEDLIBS) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
+ }
+
+ actions Cc
+ {
+ $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
+ }
+
+ }
+ else if $(TOOLSET) = MINGW
+ {
+ actions together piecemeal Archive
+ {
+ $(AR) $(<) $(>:T)
+ }
+
+ actions Cc
+ {
+ $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
+ }
+ }
+ else if $(TOOLSET) = WATCOM
+ {
+ actions together piecemeal Archive
+ {
+ $(AR) $(<) +-$(>)
+ }
+
+ actions Cc
+ {
+ $(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
+ }
+
+ actions Shell
+ {
+ $(CP) $(>) $(<)
+ }
+ }
+ else if $(TOOLSET) = LCC
+ {
+ actions together piecemeal Archive
+ {
+ $(AR) /out:$(<) $(>)
+ }
+
+ actions Cc
+ {
+ $(CC) $(CCFLAGS) $(OPTIM) -Fo$(<) -I$(HDRS) $(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
+ }
+
+ actions Shell
+ {
+ $(CP) $(>) $(<)
+ }
+ }
+}
+
+#
+# OS2 specific actions
+#
+
+else if $(OS2)
+{
+ if $(TOOLSET) = WATCOM
+ {
+ actions together piecemeal Archive
+ {
+ $(AR) $(<) +-$(>)
+ }
+
+ actions Cc
+ {
+ $(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)
+ }
+
+ actions Shell
+ {
+ $(CP) $(>) $(<)
+ }
+ }
+ else if $(TOOLSET) = EMX
+ {
+ actions together piecemeal Archive
+ {
+ $(AR) $(<) $(>:T)
+ }
+
+ actions Cc
+ {
+ $(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
+ }
+
+ actions C++
+ {
+ $(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)
+ }
+ }
+}
+
+#
+# VMS specific actions
+#
+
+else if $(VMS)
+{
+ actions updated together piecemeal Archive
+ {
+ lib/replace $(<) $(>[1]) ,$(>[2-])
+ }
+
+ actions Cc
+ {
+ $(CC)/obj=$(<) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>)
+ }
+
+ actions C++
+ {
+ $(C++)/obj=$(<) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>)
+ }
+
+ actions piecemeal together existing Clean
+ {
+ $(RM) $(>[1]);* ,$(>[2-]);*
+ }
+
+ actions together quietly CreLib
+ {
+ if f$search("$(<)") .eqs. "" then lib/create $(<)
+ }
+
+ actions GenFile1
+ {
+ mcr $(>[1]) $(<) $(>[2-])
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK)/exe=$(<) $(LINKFLAGS) $(>[1]) ,$(>[2-]) ,$(NEEDLIBS)/lib ,$(LINKLIBS)
+ }
+
+ actions quietly updated piecemeal together RmTemps
+ {
+ $(RM) $(>[1]);* ,$(>[2-]);*
+ }
+
+ actions Shell
+ {
+ $(CP) $(>) $(<)
+ }
+}
+
+#
+# Mac specific actions
+#
+
+else if $(MAC)
+{
+ actions together Archive
+ {
+ $(LINK) -library -o $(<) $(>)
+ }
+
+ actions Cc
+ {
+ set -e MWCincludes $(MACINC)
+ $(CC) -o $(<) $(CCFLAGS) $(OPTIM) $(>)
+ }
+
+ actions C++
+ {
+ set -e MWCincludes $(MACINC)
+ $(CC) -o $(<) $(C++FLAGS) $(OPTIM) $(>)
+ }
+
+ actions Link bind NEEDLIBS
+ {
+ $(LINK) -o $(<) $(LINKFLAGS) $(>) $(NEEDLIBS) "$(LINKLIBS)"
+ }
+}
+
+#
+# Backwards compatibility with jam 1, where rules were uppercased.
+#
+
+rule BULK { Bulk $(<) : $(>) ; }
+rule FILE { File $(<) : $(>) ; }
+rule HDRRULE { HdrRule $(<) : $(>) ; }
+rule INSTALL { Install $(<) : $(>) ; }
+rule LIBRARY { Library $(<) : $(>) ; }
+rule LIBS { LinkLibraries $(<) : $(>) ; }
+rule LINK { Link $(<) : $(>) ; }
+rule MAIN { Main $(<) : $(>) ; }
+rule SETUID { Setuid $(<) ; }
+rule SHELL { Shell $(<) : $(>) ; }
+rule UNDEFINES { Undefines $(<) : $(>) ; }
+
+# Old INSTALL* didn't take dest directory.
+
+rule INSTALLBIN { InstallBin $(BINDIR) : $(<) ; }
+rule INSTALLLIB { InstallLib $(LIBDIR) : $(<) ; }
+rule INSTALLMAN { InstallMan $(MANDIR) : $(<) ; }
+
+# Compatibility with jam 2.2.
+
+rule addDirName { $(<) += [ FDirName $(>) ] ; }
+rule makeDirName { $(<) = [ FDirName $(>) ] ; }
+rule makeGristedName { $(<) = [ FGristSourceFiles $(>) ] ; }
+rule makeRelPath { $(<[1]) = [ FRelPath $(<[2-]) : $(>) ] ; }
+rule makeSuffixed { $(<[1]) = [ FAppendSuffix $(>) : $(<[2]) ] ; }
+
+#
+# Now include the user's Jamfile.
+#
+
+{
+ if $(JAMFILE) { include $(JAMFILE) ; }
+}
+
+}
diff --git a/jam-files/engine/boost-jam.spec b/jam-files/engine/boost-jam.spec
new file mode 100644
index 000000000..bc572fc96
--- /dev/null
+++ b/jam-files/engine/boost-jam.spec
@@ -0,0 +1,64 @@
+Name: boost-jam
+Version: 3.1.19
+Summary: Build tool
+Release: 1
+Source: %{name}-%{version}.tgz
+
+License: Boost Software License, Version 1.0
+Group: Development/Tools
+URL: http://www.boost.org
+Packager: Rene Rivera <grafik@redshift-software.com>
+BuildRoot: /var/tmp/%{name}-%{version}.root
+
+%description
+Boost Jam is a build tool based on FTJam, which in turn is based on
+Perforce Jam. It contains significant improvements made to facilitate
+its use in the Boost Build System, but should be backward compatible
+with Perforce Jam.
+
+Authors:
+ Perforce Jam : Christopher Seiwald
+ FT Jam : David Turner
+ Boost Jam : David Abrahams
+
+Copyright:
+ /+\
+ +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ \+/
+ License is hereby granted to use this software and distribute it
+ freely, as long as this copyright notice is retained and modifications
+ are clearly marked.
+ ALL WARRANTIES ARE HEREBY DISCLAIMED.
+
+Also:
+ Copyright 2001-2006 David Abrahams.
+ Copyright 2002-2006 Rene Rivera.
+ Copyright 2003-2006 Vladimir Prus.
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+%prep
+%setup -n %{name}-%{version}
+
+%build
+LOCATE_TARGET=bin ./build.sh $BOOST_JAM_TOOLSET
+
+%install
+rm -rf $RPM_BUILD_ROOT
+mkdir -p $RPM_BUILD_ROOT%{_bindir}
+mkdir -p $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
+install -m 755 bin/bjam $RPM_BUILD_ROOT%{_bindir}/bjam-%{version}
+ln -sf bjam-%{version} $RPM_BUILD_ROOT%{_bindir}/bjam
+cp -R *.html *.png *.css LICENSE*.txt images jam $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
+
+find $RPM_BUILD_ROOT -name CVS -type d -exec rm -r {} \;
+
+%files
+%defattr(-,root,root)
+%attr(755,root,root) /usr/bin/*
+%doc %{_docdir}/%{name}-%{version}
+
+
+%clean
+rm -rf $RPM_BUILD_ROOT
diff --git a/jam-files/engine/boost-no-inspect b/jam-files/engine/boost-no-inspect
new file mode 100644
index 000000000..8a06f3a70
--- /dev/null
+++ b/jam-files/engine/boost-no-inspect
@@ -0,0 +1 @@
+this is really out of our hands, so tell inspect to ignore this directory \ No newline at end of file
diff --git a/jam-files/engine/build.bat b/jam-files/engine/build.bat
new file mode 100644
index 000000000..f927b7697
--- /dev/null
+++ b/jam-files/engine/build.bat
@@ -0,0 +1,532 @@
+@ECHO OFF
+
+REM ~ Copyright 2002-2007 Rene Rivera.
+REM ~ Distributed under the Boost Software License, Version 1.0.
+REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+setlocal
+goto Start
+
+
+:Set_Error
+color 00
+goto :eof
+
+
+:Clear_Error
+ver >nul
+goto :eof
+
+
+:Error_Print
+REM Output an error message and set the errorlevel to indicate failure.
+setlocal
+ECHO ###
+ECHO ### %1
+ECHO ###
+ECHO ### You can specify the toolset as the argument, i.e.:
+ECHO ### .\build.bat msvc
+ECHO ###
+ECHO ### Toolsets supported by this script are: borland, como, gcc, gcc-nocygwin,
+ECHO ### intel-win32, metrowerks, mingw, msvc, vc7, vc8, vc9, vc10
+ECHO ###
+call :Set_Error
+endlocal
+goto :eof
+
+
+:Test_Path
+REM Tests for the presence of the given file (executable) in the directories
+REM listed in the PATH environment variable. Additionally sets FOUND_PATH to
+REM the path of the found file.
+call :Clear_Error
+setlocal
+set test=%~$PATH:1
+endlocal
+if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1
+goto :eof
+
+
+:Test_Option
+REM Tests whether the given string is in the form of an option: "--*"
+call :Clear_Error
+setlocal
+set test=%1
+if not defined test (
+ call :Set_Error
+ goto Test_Option_End
+)
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if not "-" == "%test:~1,1%" call :Set_Error
+:Test_Option_End
+endlocal
+goto :eof
+
+
+:Test_Empty
+REM Tests whether the given string is not empty
+call :Clear_Error
+setlocal
+set test=%1
+if not defined test (
+ call :Clear_Error
+ goto Test_Empty_End
+)
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if not "" == "%test%" call :Set_Error
+:Test_Empty_End
+endlocal
+goto :eof
+
+
+:Call_If_Exists
+if EXIST %1 call %*
+goto :eof
+
+
+:Guess_Toolset
+REM Try and guess the toolset to bootstrap the build with...
+REM Sets BOOST_JAM_TOOLSET to the first found toolset.
+REM May also set BOOST_JAM_TOOLSET_ROOT to the
+REM location of the found toolset.
+
+call :Clear_Error
+call :Test_Empty %ProgramFiles%
+if not errorlevel 1 set ProgramFiles=C:\Program Files
+
+call :Clear_Error
+if NOT "_%VS100COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc10"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 10.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc10"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 10.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS90COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc9"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 9.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc9"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 9.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS80COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc8"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 8\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc8"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 8\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS71COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%\..\..\VC7\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VCINSTALLDIR%_" == "__" (
+ REM %VCINSTALLDIR% is also set for VC9 (and probably VC8)
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%VCINSTALLDIR%\VC7\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET\VC7\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET\VC7\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%MSVCDir%_" == "__" (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio\VC98\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio\VC98\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual C++\VC98\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual C++\VC98\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path cl.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path vcvars32.bat
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=msvc"
+ call "%FOUND_PATH%VCVARS32.BAT"
+ set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
+ goto :eof)
+call :Clear_Error
+if EXIST "C:\Borland\BCC55\Bin\bcc32.exe" (
+ set "BOOST_JAM_TOOLSET=borland"
+ set "BOOST_JAM_TOOLSET_ROOT=C:\Borland\BCC55\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path bcc32.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=borland"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path icl.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=intel-win32"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ goto :eof)
+call :Clear_Error
+if EXIST "C:\MinGW\bin\gcc.exe" (
+ set "BOOST_JAM_TOOLSET=mingw"
+ set "BOOST_JAM_TOOLSET_ROOT=C:\MinGW\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%CWFolder%_" == "__" (
+ set "BOOST_JAM_TOOLSET=metrowerks"
+ set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
+ goto :eof )
+call :Clear_Error
+call :Test_Path mwcc.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=metrowerks"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\..\"
+ goto :eof)
+call :Clear_Error
+call :Error_Print "Could not find a suitable toolset."
+goto :eof
+
+
+:Guess_Yacc
+REM Tries to find bison or yacc in common places so we can build the grammar.
+call :Clear_Error
+call :Test_Path yacc.exe
+if not errorlevel 1 (
+ set "YACC=yacc -d"
+ goto :eof)
+call :Clear_Error
+call :Test_Path bison.exe
+if not errorlevel 1 (
+ set "YACC=bison -d --yacc"
+ goto :eof)
+call :Clear_Error
+if EXIST "C:\Program Files\GnuWin32\bin\bison.exe" (
+ set "YACC=C:\Program Files\GnuWin32\bin\bison.exe" -d --yacc
+ goto :eof)
+call :Clear_Error
+call :Error_Print "Could not find Yacc to build the Jam grammar."
+goto :eof
+
+
+:Start
+set BOOST_JAM_TOOLSET=
+set BOOST_JAM_ARGS=
+
+REM If no arguments are given, guess the toolset;
+REM or if the first argument is an option, guess the toolset;
+REM otherwise the argument is the toolset to use.
+call :Clear_Error
+call :Test_Empty %1
+if not errorlevel 1 (
+ call :Guess_Toolset
+ if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
+)
+
+call :Clear_Error
+call :Test_Option %1
+if not errorlevel 1 (
+ call :Guess_Toolset
+ if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
+)
+
+call :Clear_Error
+set BOOST_JAM_TOOLSET=%1
+shift
+goto Setup_Toolset
+
+
+:Setup_Toolset
+REM Set up the toolset command and options. This bit of code
+REM needs to be flexible enough to handle both when
+REM the toolset was guessed at and found, or when the toolset
+REM was indicated in the command arguments.
+REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way
+REM because in BAT variables are substituted only once during a single
+REM command. A complete "if ... ( commands ) else ( commands )"
+REM is a single command, even though it's in multiple lines here.
+:Setup_Args
+call :Clear_Error
+call :Test_Empty %1
+if not errorlevel 1 goto Config_Toolset
+call :Clear_Error
+call :Test_Option %1
+if errorlevel 1 (
+ set BOOST_JAM_ARGS=%BOOST_JAM_ARGS% %1
+ shift
+ goto Setup_Args
+)
+:Config_Toolset
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_metrowerks_" goto Skip_METROWERKS
+if NOT "_%CWFolder%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
+ )
+set "PATH=%BOOST_JAM_TOOLSET_ROOT%Other Metrowerks Tools\Command Line Tools;%PATH%"
+set "BOOST_JAM_CC=mwcc -runtime ss -cwd include -DNT -lkernel32.lib -ladvapi32.lib -luser32.lib"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_METROWERKS
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_msvc_" goto Skip_MSVC
+if NOT "_%MSVCDir%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
+ )
+call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
+if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ )
+set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_MSVC
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc7_" goto Skip_VC7
+if NOT "_%VS71COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%..\..\VC7\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC7
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc8_" goto Skip_VC8
+if NOT "_%VS80COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC8
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc9_" goto Skip_VC9
+if NOT "_%VS90COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC9
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc10_" goto Skip_VC10
+if NOT "_%VS100COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC10
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_borland_" goto Skip_BORLAND
+if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ call :Test_Path bcc32.exe )
+if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ ) )
+if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%Bin;%PATH%"
+ )
+set "BOOST_JAM_CC=bcc32 -WC -w- -q -I%BOOST_JAM_TOOLSET_ROOT%Include -L%BOOST_JAM_TOOLSET_ROOT%Lib /DNT -nbootstrap"
+set "BOOST_JAM_OPT_JAM=-ejam0"
+set "BOOST_JAM_OPT_MKJAMBASE=-emkjambasejam0"
+set "BOOST_JAM_OPT_YYACC=-eyyacc0"
+set "_known_=1"
+:Skip_BORLAND
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_como_" goto Skip_COMO
+set "BOOST_JAM_CC=como -DNT"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_COMO
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc_" goto Skip_GCC
+set "BOOST_JAM_CC=gcc -DNT"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_GCC
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc-nocygwin_" goto Skip_GCC_NOCYGWIN
+set "BOOST_JAM_CC=gcc -DNT -mno-cygwin"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_GCC_NOCYGWIN
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_intel-win32_" goto Skip_INTEL_WIN32
+set "BOOST_JAM_CC=icl -DNT /nologo kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_INTEL_WIN32
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_mingw_" goto Skip_MINGW
+if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ )
+set "BOOST_JAM_CC=gcc -DNT"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_MINGW
+call :Clear_Error
+if "_%_known_%_" == "__" (
+ call :Error_Print "Unknown toolset: %BOOST_JAM_TOOLSET%"
+)
+if errorlevel 1 goto Finish
+
+echo ###
+echo ### Using '%BOOST_JAM_TOOLSET%' toolset.
+echo ###
+
+set YYACC_SOURCES=yyacc.c
+set MKJAMBASE_SOURCES=mkjambase.c
+set BJAM_SOURCES=
+set BJAM_SOURCES=%BJAM_SOURCES% command.c compile.c debug.c execnt.c expand.c filent.c glob.c hash.c
+set BJAM_SOURCES=%BJAM_SOURCES% hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c
+set BJAM_SOURCES=%BJAM_SOURCES% newstr.c option.c output.c parse.c pathunix.c regexp.c
+set BJAM_SOURCES=%BJAM_SOURCES% rules.c scan.c search.c subst.c timestamp.c variable.c modules.c
+set BJAM_SOURCES=%BJAM_SOURCES% strings.c filesys.c builtins.c md5.c pwd.c class.c w32_getreg.c native.c
+set BJAM_SOURCES=%BJAM_SOURCES% modules/set.c modules/path.c modules/regex.c
+set BJAM_SOURCES=%BJAM_SOURCES% modules/property-set.c modules/sequence.c modules/order.c
+
+set BJAM_UPDATE=
+:Check_Update
+call :Test_Empty %1
+if not errorlevel 1 goto Check_Update_End
+call :Clear_Error
+setlocal
+set test=%1
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if "%test%" == "--update" set BJAM_UPDATE=update
+endlocal
+shift
+if not "_%BJAM_UPDATE%_" == "_update_" goto Check_Update
+:Check_Update_End
+if "_%BJAM_UPDATE%_" == "_update_" (
+ if not exist ".\bootstrap\jam0.exe" (
+ set BJAM_UPDATE=
+ )
+)
+
+@echo ON
+@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Bootstrap
+if exist bootstrap rd /S /Q bootstrap
+md bootstrap
+@if not exist jamgram.y goto Bootstrap_GrammarPrep
+@if not exist jamgramtab.h goto Bootstrap_GrammarPrep
+@goto Skip_GrammarPrep
+:Bootstrap_GrammarPrep
+%BOOST_JAM_CC% %BOOST_JAM_OPT_YYACC% %YYACC_SOURCES%
+@if not exist ".\bootstrap\yyacc0.exe" goto Skip_GrammarPrep
+.\bootstrap\yyacc0 jamgram.y jamgramtab.h jamgram.yy
+:Skip_GrammarPrep
+@if not exist jamgram.c goto Bootstrap_GrammarBuild
+@if not exist jamgram.h goto Bootstrap_GrammarBuild
+@goto Skip_GrammarBuild
+:Bootstrap_GrammarBuild
+@echo OFF
+if "_%YACC%_" == "__" (
+ call :Guess_Yacc
+)
+if errorlevel 1 goto Finish
+@echo ON
+%YACC% jamgram.y
+@if errorlevel 1 goto Finish
+del /f jamgram.c
+rename y.tab.c jamgram.c
+del /f jamgram.h
+rename y.tab.h jamgram.h
+:Skip_GrammarBuild
+@echo ON
+@if exist jambase.c goto Skip_Jambase
+%BOOST_JAM_CC% %BOOST_JAM_OPT_MKJAMBASE% %MKJAMBASE_SOURCES%
+@if not exist ".\bootstrap\mkjambase0.exe" goto Skip_Jambase
+.\bootstrap\mkjambase0 jambase.c Jambase
+:Skip_Jambase
+%BOOST_JAM_CC% %BOOST_JAM_OPT_JAM% %BJAM_SOURCES%
+:Skip_Bootstrap
+@if not exist ".\bootstrap\jam0.exe" goto Skip_Jam
+@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Clean
+.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " clean
+:Skip_Clean
+@set args=%*
+@echo OFF
+:Set_Args
+setlocal
+call :Test_Empty %args%
+if not errorlevel 1 goto Set_Args_End
+set test=###%args:~0,2%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+set test=%test:~0,1%
+if "-" == "%test%" goto Set_Args_End
+endlocal
+set args=%args:~1%
+goto Set_Args
+:Set_Args_End
+@echo ON
+.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " %args%
+:Skip_Jam
+
+:Finish
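
build.bat bootstraps in two stages: it guesses or accepts a toolset, compiles a throwaway jam0.exe, and then lets jam0 drive build.jam. When jamgram.c or jamgram.h is missing it also regenerates the parser via :Guess_Yacc. A minimal POSIX sketch of that regeneration step, assuming bison is on the PATH:

    # Regenerate the Jam grammar the same way the wrapper scripts do (bison assumed).
    bison -d --yacc jamgram.y   # writes y.tab.c and y.tab.h
    mv -f y.tab.c jamgram.c
    mv -f y.tab.h jamgram.h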
diff --git a/jam-files/engine/build.jam b/jam-files/engine/build.jam
new file mode 100644
index 000000000..266b07a17
--- /dev/null
+++ b/jam-files/engine/build.jam
@@ -0,0 +1,1070 @@
+#~ Copyright 2002-2007 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Clean env vars of any "extra" empty values.
+for local v in ARGV CC CFLAGS LIBS
+{
+ local values ;
+ for local x in $($(v))
+ {
+ if $(x) != ""
+ {
+ values += $(x) ;
+ }
+ }
+ $(v) = $(values) ;
+}
+
+# Platform related specifics.
+if $(OS) = NT { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; }
+else if $(OS) = OS2 { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; }
+else if $(OS) = VMS { rule .path { return "[.$(<:J=/)]" ; } }
+else if $(OS) = MAC { rule .path { return ":$(<:J=\:)" ; } }
+else { rule .path { return "$(<:J=/)" ; } }
+if $(OS) = VMS { . = "_" ; }
+else { . = "." ; }
+./ ?= "" ;
+
+# Info about what we are building.
+_VERSION_ = 3 1 19 ;
+NAME = boost-jam ;
+VERSION = $(_VERSION_:J=$(.)) ;
+RELEASE = 1 ;
+LICENSE = LICENSE_1_0 ;
+
+# Generate development debug binaries?
+if --debug in $(ARGV)
+{
+ debug = true ;
+}
+
+if --profile in $(ARGV)
+{
+ profile = true ;
+}
+
+# Attempt to generate and/or build the grammar?
+if --grammar in $(ARGV)
+{
+ grammar = true ;
+}
+
+# Do we need to add a default build type argument?
+if ! ( --release in $(ARGV) ) &&
+ ! ( --debug in $(ARGV) ) &&
+ ! ( --profile in $(ARGV) )
+{
+ ARGV += --release ;
+}
+
+# Enable, and configure, Python hooks.
+with-python = ;
+python-location = [ MATCH --with-python=(.*) : $(ARGV) ] ;
+if $(python-location)
+{
+ with-python = true ;
+}
+if $(with-python)
+{
+ if $(OS) = NT
+ {
+ --python-include = [ .path $(python-location) include ] ;
+ --python-lib = ;
+ for local v in 26 25 24 23 22
+ {
+ --python-lib ?=
+ [ GLOB [ .path $(python-location) libs ] : "python$(v).lib" ]
+ [ GLOB $(python-location) [ .path $(python-location) libs ]
+ $(Path) $(PATH) $(path) : "python$(v).dll" ]
+ ;
+ if ! $(--python-lib[2])
+ {
+ --python-lib = ;
+ }
+ }
+ --python-lib = $(--python-lib[1]) ;
+ }
+ else if $(OS) = MACOSX
+ {
+ --python-include = [ .path $(python-location) Headers ] ;
+ --python-lib = $(python-location) Python ;
+ }
+ else
+ {
+ --python-include = ;
+ --python-lib = ;
+ for local v in 2.6 2.5 2.4 2.3 2.2
+ {
+ local inc = [ GLOB [ .path $(python-location) include ] : python$(v) ] ;
+ local lib = [ GLOB [ .path $(python-location) lib ] : libpython$(v)* ] ;
+ if $(inc) && $(lib)
+ {
+ --python-include ?= $(inc) ;
+ --python-lib ?= $(lib[1]:D) python$(v) ;
+ }
+ }
+ }
+}
+
+# Boehm GC?
+if --gc in $(ARGV)
+{
+ --boehm-gc = true ;
+}
+if $(--boehm-gc)
+{
+ --extra-include += [ .path [ PWD ] "boehm_gc" "include" ] ;
+}
+
+# Duma?
+if --duma in $(ARGV)
+{
+ --duma = true ;
+}
+if $(--duma)
+{
+ --extra-include += [ .path [ PWD ] "duma" ] ;
+}
+
+# An explicit root for the toolset? (trim spaces)
+toolset-root = [ MATCH --toolset-root=(.*) : $(ARGV) ] ;
+{
+ local t = [ MATCH "[ ]*(.*)" : $(toolset-root:J=" ") ] ;
+ toolset-root = ;
+ while $(t)
+ {
+ t = [ MATCH "([^ ]+)([ ]*)(.*)" : $(t) ] ;
+ toolset-root += $(t[1]) ;
+ if $(t[3]) { toolset-root += $(t[2]) ; }
+ t = $(t[3]) ;
+ }
+ toolset-root = $(toolset-root:J="") ;
+}
+
+# Configure the implemented toolsets. These are minimal
+# commands and options to compile the full Jam. When
+# adding new toolsets make sure to add them to the
+# "known" list also.
+
+rule toolset ( name command .type ? : opt.out + : opt.define * : flags * : linklibs * )
+{
+ .type ?= "" ;
+ tool.$(name)$(.type).cc ?= $(command) ;
+ tool.$(name)$(.type).opt.out ?= $(opt.out) ;
+ tool.$(name)$(.type).opt.define ?= $(opt.define) ;
+ tool.$(name)$(.type).flags ?= $(flags) ;
+ tool.$(name)$(.type).linklibs ?= $(linklibs) ;
+ if ! $(name) in $(toolsets) { toolsets += $(name) ; }
+}
+
+rule if-os ( os + : yes-opt * : no-opt * )
+ { if $(os) in $(OS) { return $(yes-opt) ; } else { return $(no-opt) ; } }
+
+rule opt ( type : yes-opt * : no-opt * )
+ { if $(type) in $(ARGV) { return $(yes-opt) ; } else { return $(no-opt) ; } }
+
+## HP-UX aCC compiler
+toolset acc cc : "-o " : -D
+ : -Ae
+ [ opt --release : -s -O3 ]
+ [ opt --debug : -g -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Borland C++ 5.5.x
+toolset borland bcc32 : -e -n : /D
+ : -WC -w- -q "-I$(toolset-root)Include" "-L$(toolset-root)Lib"
+ [ opt --release : -O2 -vi -w-inl ]
+ [ opt --debug : -v -Od -vi- ]
+ -I$(--python-include) -I$(--extra-include)
+ : $(--python-lib[1]) ;
+## Generic Unix cc
+if ! $(CC) { CC = cc ; }
+toolset cc $(CC) : "-o " : -D
+ : $(CFLAGS)
+ [ opt --release : -s -O ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : $(LIBS) -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Comeau C/C++ 4.x
+toolset como como : "-o " : -D
+ : --c
+ [ opt --release : --inlining ]
+ [ opt --debug : --no_inlining ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Clang Linux 2.8+
+toolset clang clang : "-o " : -D
+ : -Wno-unused -Wno-format
+ [ opt --release : -Os ]
+ [ opt --debug : -g -O0 -fno-inline ]
+ [ opt --profile : -finline-functions -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## MacOSX Darwin, using GCC 2.9.x, 3.x
+toolset darwin cc : "-o " : -D
+ :
+ [ opt --release : -Wl,-x -O3 -finline-functions ]
+ [ opt --debug : -g -O0 -fno-inline -pg ]
+ [ opt --profile : -Wl,-x -O3 -finline-functions -g -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## GCC 2.x, 3.x, 4.x
+toolset gcc gcc : "-o " : -D
+ : -pedantic -fno-strict-aliasing
+ [ opt --release : [ opt --symbols : -g : -s ] -O3 ]
+ [ opt --debug : -g -O0 -fno-inline ]
+ -I$(--python-include) -I$(--extra-include) -Wno-long-long
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## GCC 2.x, 3.x on CYGWIN but without cygwin1.dll
+toolset gcc-nocygwin gcc : "-o " : -D
+ : -s -O3 -mno-cygwin
+ [ opt --release : -finline-functions ]
+ [ opt --debug : -s -O3 -fno-inline -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Darwin
+toolset intel-darwin icc : "-o " : -D
+ :
+ [ opt --release : -O3 ]
+ [ opt --debug : -g -O0 -p ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Linux
+toolset intel-linux icc : "-o " : -D
+ :
+ [ opt --release : -Xlinker -s -O3 ]
+ [ opt --debug : -g -O0 -p ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Win32
+toolset intel-win32 icl : /Fe : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /GB ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## KCC ?
+toolset kcc KCC : "-o " : -D
+ :
+ [ opt --release : -s +K2 ]
+ [ opt --debug : -g +K0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Borland Kylix
+toolset kylix bc++ : -o : -D
+ : -tC -q
+ [ opt --release : -O2 -vi -w-inl ]
+ [ opt --debug : -v -Od -vi- ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Metrowerks CodeWarrior 8.x
+{
+ # Even though CW can compile all files at once, it crashes if it tries in the bjam case.
+ local mwcc = ; if $(OS) = NT { mwcc = mwcc ; } else { mwcc = mwc$(OSPLAT:L) ; }
+ mwcc ?= mwcc ;
+ toolset metrowerks $(mwcc) : "-o " : -D
+ : -c -lang c -subsystem console -cwd include
+ [ opt --release : -runtime ss -opt full -inline all ]
+ [ opt --debug : -runtime ssd -opt none -inline off ]
+ -I$(--python-include) -I$(--extra-include) ;
+ toolset metrowerks $(mwcc) .link : "-o " :
+ : -subsystem console -lkernel32.lib -ladvapi32.lib -luser32.lib
+ [ opt --release : -runtime ss ]
+ [ opt --debug : -runtime ssd ]
+ : $(--python-lib[1]) ;
+}
+## MINGW GCC
+toolset mingw gcc : "-o " : -D
+ :
+ [ opt --release : -s -O3 -finline-functions ]
+ [ opt --debug : -g -O0 -fno-inline -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : $(--python-lib[2]) ;
+## MIPS Pro
+toolset mipspro cc : "-o " : -D
+ :
+ [ opt --release : -s -O3 -g0 -INLINE:none ]
+ [ opt --debug : -g -O0 -INLINE ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Microsoft Visual Studio C++ 6.x
+toolset msvc cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
+ [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## QNX 6.x GCC 3.x/2.95.3
+toolset qcc qcc : "-o " : -D
+ : -Wc,-pedantic -Wc,-fno-strict-aliasing
+ [ opt --release : [ opt --symbols : -g ] -O3 -Wc,-finline-functions ]
+ [ opt --debug : -g -O0 -Wc,-fno-inline ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Qlogic Pathscale 2.4
+toolset pathscale pathcc : "-o " : -D
+ :
+ [ opt --release : -s -Ofast -O3 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Portland Group Pgi 6.2
+toolset pgi pgcc : "-o " : -D
+ :
+ [ opt --release : -s -O3 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Sun Workshop 6 C++
+toolset sun cc : "-o " : -D
+ :
+ [ opt --release : -s -fast -xO4 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Sun Workshop 6 C++ (old alias)
+toolset sunpro cc : "-o " : -D
+ :
+ [ opt --release : -s -fast -xO4 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Compaq Alpha CXX
+toolset tru64cxx cc : "-o " : -D
+ :
+ [ opt --release : -s -O5 -inline speed ]
+ [ opt --debug : -g -O0 -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## IBM VisualAge C++
+toolset vacpp xlc : "-o " : -D
+ :
+ [ opt --release : -s -O3 -qstrict -qinline ]
+ [ opt --debug : -g -qNOOPTimize -qnoinline -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) [ if-os AIX : -bmaxdata:0x40000000 ] ;
+## Microsoft Visual C++ .NET 7.x
+toolset vc7 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
+ [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2005
+toolset vc8 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2008
+toolset vc9 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2010
+toolset vc10 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## VMS/OpenVMS DEC C
+toolset vmsdecc cc : /OBJECT= : "/DEFINES=(" "," ")"
+ : /STANDARD=VAXC /PREFIX_LIBRARY_ENTRIES=ALL_ENTRIES
+ [ opt --release : /OPTIMIZE /NODEBUG ]
+ [ opt --debug : /NOOPTIMIZE /DEBUG ]
+ ;
+toolset vmsdecc link .link : /EXECUTABLE= :
+ : /NOMAP
+ [ opt --release : /NODEBUG ]
+ [ opt --debug : /DEBUG ]
+ ;
+
+# First set the build commands and options according to the
+# preset toolset.
+toolset = [ MATCH --toolset=(.*) : $(ARGV) ] ;
+if ! $(toolset)
+{
+ # For some reason, the following test does not catch empty toolset.
+ ECHO "###" ;
+ ECHO "###" No toolset specified. Please use --toolset option. ;
+ ECHO "###" ;
+ ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
+ EXIT "###" ;
+}
+if ! $(toolset) in $(toolsets)
+{
+ ECHO "###" ;
+ ECHO "###" Unknown toolset: $(toolset) ;
+ ECHO "###" ;
+ ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
+ EXIT "###" ;
+}
+--cc = $(tool.$(toolset).cc) ;
+if $(tool.$(toolset).opt.out[2])
+{
+ if $(tool.$(toolset).opt.out[1]) = $(tool.$(toolset).opt.out[2])
+ {
+ --out = $(tool.$(toolset).opt.out[1]) ;
+ --dir = $(tool.$(toolset).opt.out[3-]) ;
+ }
+ else
+ {
+ --bin = $(tool.$(toolset).opt.out[1]) ;
+ --dir = $(tool.$(toolset).opt.out[2-]) ;
+ }
+}
+else
+{
+ --out = $(tool.$(toolset).opt.out) ;
+}
+--def = $(tool.$(toolset).opt.define) ;
+--flags = $(tool.$(toolset).flags) ;
+--defs = $(tool.$(toolset).defines) ;
+--libs = $(tool.$(toolset).linklibs) ;
+if $(tool.$(toolset).link.cc)
+{
+ --link = $(tool.$(toolset).link.cc) ;
+ if $(tool.$(toolset).link.opt.out[2])
+ {
+ if $(tool.$(toolset).link.opt.out[1]) = $(tool.$(toolset).link.opt.out[2])
+ {
+ --link-out = $(tool.$(toolset).link.opt.out[1]) ;
+ --link-dir = $(tool.$(toolset).link.opt.out[3-]) ;
+ }
+ else
+ {
+ --link-bin = $(tool.$(toolset).link.opt.out[1]) ;
+ --link-dir = $(tool.$(toolset).link.opt.out[2-]) ;
+ }
+ }
+ else
+ {
+ --link-out = $(tool.$(toolset).link.opt.out) ;
+ }
+ --link-def = $(tool.$(toolset).link.opt.define) ;
+ --link-flags = $(tool.$(toolset).link.flags) ;
+ --link-defs = $(tool.$(toolset).link.defines) ;
+ --link-libs = $(tool.$(toolset).link.linklibs) ;
+}
+
+# Put executables in platform-specific subdirectory.
+locate-target = $(LOCATE_TARGET) ;
+if $(OS) = VMS
+{
+ locate-target ?= bin$(.)vms ;
+ platform = vms ;
+}
+else if $(OS) = MAC
+{
+ locate-target ?= bin$(.)$(OS:L)$(OSPLAT:L) ;
+ platform = $(OS:L)$(OSPLAT:L) ;
+}
+else if $(OSPLAT)
+{
+ locate-target ?= bin$(.)$(OS:L)$(OSPLAT:L) ;
+ platform = $(OS:L)$(OSPLAT:L) ;
+}
+else
+{
+ locate-target ?= bin$(.)$(OS:L) ;
+ platform = $(OS:L) ;
+}
+if $(debug)
+{
+ locate-target = [ .path $(locate-target)$(.)debug ] ;
+}
+if $(profile)
+{
+ locate-target = [ .path $(locate-target)$(.)profile ] ;
+}
+else
+{
+ locate-target = [ .path $(locate-target) ] ;
+}
+
+if --show-locate-target in $(ARGV)
+{
+ ECHO $(locate-target) ;
+}
+
+# We have some different files for UNIX, VMS, and NT.
+jam.source =
+ command.c compile.c debug.c expand.c glob.c
+ hash.c hcache.c headers.c hdrmacro.c
+ jam.c jambase.c jamgram.c
+ lists.c make.c make1.c mem.c newstr.c
+ option.c output.c parse.c regexp.c rules.c
+ scan.c search.c subst.c w32_getreg.c
+ timestamp.c variable.c modules.c strings.c filesys.c
+ builtins.c pwd.c class.c native.c md5.c modules/set.c
+ modules/path.c modules/regex.c modules/property-set.c
+ modules/sequence.c modules/order.c
+ ;
+if $(OS) = NT
+{
+ jam.source += execnt.c filent.c pathunix.c ;
+}
+else if $(OS) = OS2
+{
+ jam.source += execunix.c fileos2.c pathunix.c ;
+}
+else if $(OS) = VMS
+{
+ jam.source += execvms.c filevms.c pathvms.c ;
+}
+else if $(OS) = MAC
+{
+ jam.source += execmac.c filemac.c pathmac.c ;
+}
+else
+{
+ jam.source += execunix.c fileunix.c pathunix.c ;
+}
+
+# Debug assertions, or not.
+if ! $(debug) || --noassert in $(ARGV)
+{
+ --defs += NDEBUG ;
+}
+
+# Enable some optional features.
+--defs += OPT_HEADER_CACHE_EXT ;
+--defs += OPT_GRAPH_DEBUG_EXT ;
+--defs += OPT_SEMAPHORE ;
+--defs += OPT_AT_FILES ;
+--defs += OPT_DEBUG_PROFILE ;
+
+# Bug fixes
+--defs += OPT_FIX_TARGET_VARIABLES_EXT ;
+#~ --defs += OPT_NO_EXTERNAL_VARIABLE_SPLIT ;
+
+# Improvements
+--defs += OPT_IMPROVED_PATIENCE_EXT ;
+
+# Use Boehm GC memory allocator?
+if $(--boehm-gc)
+{
+ --defs += OPT_BOEHM_GC ;
+ if $(debug)
+ {
+ --defs += GC_DEBUG ;
+ }
+}
+
+if $(--duma)
+{
+ --defs += OPT_DUMA ;
+}
+
+if ( $(OS) = NT ) && ! NT in $(--defs)
+{
+ --defs += NT ;
+}
+if $(OS) = VMS
+{
+ --defs += VMS ;
+}
+--defs += YYSTACKSIZE=5000 ;
+
+if $(with-python)
+{
+ --defs += HAVE_PYTHON ;
+}
+
+if $(debug)
+{
+ --defs += BJAM_NEWSTR_NO_ALLOCATE ;
+}
+
+
+# The basic symbolic targets...
+NOTFILE all clean dist ;
+ALWAYS clean ;
+
+# Utility rules and actions...
+rule .clean
+{
+ [DELETE] clean : $(<) ;
+}
+if $(OS) = NT { actions piecemeal together existing [DELETE] {
+ del /F /Q "$(>)"
+} }
+if $(UNIX) = true { actions piecemeal together existing [DELETE] {
+ rm -f "$(>)"
+} }
+if $(OS) = VMS { actions piecemeal together existing [DELETE] {
+ DELETE $(>[--2]:J=";*, ") $(>[-1]);*
+} }
+if $(OS) = NT {
+ --chmod+w = "attrib -r " ;
+}
+if $(UNIX) = true {
+ --chmod+w = "chmod +w " ;
+}
+if $(OS) = VMS {
+ --chmod+w = "SET FILE/PROT=(S:RWED) " ;
+}
+
+rule .mkdir
+{
+ NOUPDATE $(<) ;
+ if $(<:P) { DEPENDS $(<) : $(<:P) ; .mkdir $(<:P) ; }
+ if ! $(md<$(<)>) { [MKDIR] $(<) ; md<$(<)> = - ; }
+}
+if $(OS) = NT { actions [MKDIR] {
+ md "$(<)"
+} }
+if $(UNIX) = true { actions [MKDIR] {
+ mkdir "$(<)"
+} }
+if $(OS) = VMS { actions [MKDIR] {
+ CREATE/DIR $(<J=", ")
+} }
+
+rule .exe
+{
+ local exe = $(<) ;
+ if $(OS) = NT || ( $(UNIX) = true && $(OS) = CYGWIN ) || $(OS) = VMS { exe = $(exe:S=.exe) ; }
+ LOCATE on $(exe) = $(locate-target) ;
+ DEPENDS all : $(exe) ;
+ .mkdir $(locate-target) ;
+ if $(--link)
+ {
+ local objs = ;
+ for local s in $(>)
+ {
+ # Translate any subdir elements into a simple file name.
+ local o = [ MATCH "([^/]+)[/]?(.+)" : $(s) ] ;
+ o = $(o:J=_) ;
+ o = $(o:S=.o) ;
+ objs += $(o) ;
+ LOCATE on $(o) = $(locate-target) ;
+ DEPENDS $(exe) : $(o) ;
+ DEPENDS $(o) : $(s) ;
+ DEPENDS $(o) : $(locate-target) ;
+ [COMPILE] $(o) : $(s) ;
+ .clean $(o) ;
+ }
+ DEPENDS $(exe) : $(objs) ;
+ DEPENDS $(exe) : $(locate-target) ;
+ [COMPILE.LINK] $(exe) : $(objs) ;
+ .clean $(exe) ;
+ }
+ else
+ {
+ DEPENDS $(exe) : $(>) ;
+ DEPENDS $(exe) : $(locate-target) ;
+ [COMPILE] $(exe) : $(>) ;
+ .clean $(exe) ;
+ }
+ return $(exe) ;
+}
+if ! $(--def[2]) { actions [COMPILE] {
+ "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def)$(--defs)" "$(--flags)" "$(--libs)" "$(>)"
+} }
+else { actions [COMPILE] {
+ "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def[1])$(--defs:J=$(--def[2]))$(--def[3])" "$(--flags)" "$(--libs)" "$(>)"
+} }
+if $(OS) = VMS { actions [COMPILE.LINK] {
+ "$(--link)" $(--link-bin)$(<:D=) $(--link-dir)$(<:D)$(./) $(--link-out)$(<) $(--link-def)$(--link-defs) $(--link-flags) "$(--link-libs)" $(>J=", ")
+} }
+else { actions [COMPILE.LINK] {
+ "$(--link)" "$(--link-bin)$(<:D=)" "$(--link-dir)$(<:D)$(./)" "$(--link-out)$(<)" "$(--link-def)$(--link-defs)" "$(--link-flags)" "$(--link-libs)" "$(>)"
+} }
+
+rule .link
+{
+ DEPENDS all : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ [LINK] $(<) : $(>) ;
+ .clean $(<) ;
+}
+if $(OS) = NT { actions [LINK] {
+ copy "$(>)" "$(<)"
+} }
+if $(UNIX) = true { actions [LINK] {
+ ln -fs "$(>)" "$(<)"
+} }
+if $(OS) = VMS { actions [LINK] {
+ COPY/REPLACE $(>) $(<)
+} }
+
+rule .copy
+{
+ DEPENDS all : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ [COPY] $(<) : $(>) ;
+ .clean $(<) ;
+}
+
+# Will be redefined later.
+actions [COPY]
+{
+}
+
+
+rule .move
+{
+ DEPENDS $(<) : $(>) ;
+ [MOVE] $(<) : $(>) ;
+}
+if $(OS) = NT { actions [MOVE] {
+ del /f "$(<)"
+ rename "$(>)" "$(<)"
+} }
+if $(UNIX) = true { actions [MOVE] {
+ mv -f "$(>)" "$(<)"
+} }
+if $(OS) = VMS { actions [MOVE] {
+ RENAME "$(>)" "$(<)"
+} }
+
+# Generate the grammar tokens table, and the real yacc grammar.
+rule .yyacc
+{
+ local exe = [ .exe yyacc : yyacc.c ] ;
+ NOUPDATE $(exe) ;
+ DEPENDS $(<) : $(exe) $(>) ;
+ LEAVES $(<) ;
+ yyacc.exe on $(<) = $(exe:R=$(locate-target)) ;
+ [YYACC] $(<) : $(>) ;
+}
+actions [YYACC] {
+ $(--chmod+w)$(<[1])
+ $(--chmod+w)$(<[2])
+ "$(yyacc.exe)" "$(<)" "$(>)"
+}
+if $(grammar)
+{
+ .yyacc jamgram.y jamgramtab.h : jamgram.yy ;
+}
+else if $(debug)
+{
+ .exe yyacc : yyacc.c ;
+}
+
+# How to build the grammar.
+if $(OS) = NT
+{
+ SUFEXE = .exe ;
+ # try some other likely spellings...
+ PATH ?= $(Path) ;
+ PATH ?= $(path) ;
+}
+SUFEXE ?= "" ;
+
+yacc ?= [ GLOB $(PATH) : yacc$(SUFEXE) ] ;
+yacc ?= [ GLOB $(PATH) : bison$(SUFEXE) ] ;
+yacc ?= [ GLOB "$(ProgramFiles:J= )\\GnuWin32\\bin" "C:\\Program Files\\GnuWin32\\bin" : bison$(SUFEXE) ] ;
+yacc = $(yacc[1]) ;
+switch $(yacc:D=:S=)
+{
+ case bison : yacc += -d --yacc ;
+ case yacc : yacc += -d ;
+}
+if $(debug) && $(yacc)
+{
+ yacc += -t -v ;
+}
+yacc += $(YACCFLAGS) ;
+
+rule .yacc
+{
+ DEPENDS $(<) : $(>) ;
+ LEAVES $(<) ;
+ [YACC] $(<) : $(>) ;
+}
+if $(OS) = NT { actions [YACC] {
+ "$(yacc)" "$(>)"
+ if not errorlevel 1 (
+ del /f "$(<[1])"
+ rename y.tab$(<[1]:S) "$(<[1])"
+ del /f $(<[2])
+ rename y.tab$(<[2]:S) "$(<[2])"
+ ) else set _error_ =
+} }
+if $(UNIX) = true { actions [YACC] {
+ if ` "$(yacc)" "$(>)" ` ; then
+ mv -f y.tab$(<[1]:S) "$(<[1])"
+ mv -f y.tab$(<[2]:S) "$(<[2])"
+ else
+ exit 1
+ fi
+} }
+if $(OS) = VMS { actions [YACC] {
+ IF "$(yacc)" $(>)
+ THEN
+ RENAME y_tab$(<[1]:S) $(<[1])
+ RENAME y_tab$(<[2]:S) $(<[2])
+ ENDIF
+} }
+if $(grammar) && ! $(yacc)
+{
+ EXIT "Could not find the 'yacc' tool, and therefore can not build the grammar." ;
+}
+if $(grammar) && $(yacc)
+{
+ .yacc jamgram.c jamgram.h : jamgram.y ;
+}
+
+# How to build the compiled in jambase.
+rule .mkjambase
+{
+ local exe = [ .exe mkjambase : mkjambase.c ] ;
+ DEPENDS $(<) : $(exe) $(>) ;
+ LEAVES $(<) ;
+ mkjambase.exe on $(<) = $(exe:R=$(locate-target)) ;
+ [MKJAMBASE] $(<) : $(>) ;
+}
+actions [MKJAMBASE] {
+ $(--chmod+w)$(<)
+ $(mkjambase.exe) "$(<)" "$(>)"
+}
+if $(debug)
+{
+ .mkjambase jambase.c : Jambase ;
+}
+
+# How to build Jam.
+rule .jam
+{
+ $(>).exe = [ .exe $(>) : $(jam.source) ] ;
+ DEPENDS all : $($(>).exe) ;
+
+ # Make a copy under the old name.
+ $(<).exe = $(<:S=$($(>).exe:S)) ;
+ LOCATE on $($(<).exe) = $(locate-target) ;
+ .copy $($(<).exe) : $($(>).exe) ;
+ DEPENDS all : $($(<).exe) ;
+}
+.jam bjam : b2 ;
+
+
+# Scan sources for header dependencies.
+# WARNING: Yes those are *REAL TABS* below. DO NOT CHANGE,
+# under any circumstances, to spaces!! And the tabs
+# indenting this are so that if someone is in the mood to
+# replace tabs they hit this comment, and hopefully notice
+# their error.
+rule .scan
+{
+ HDRRULE on $(<:D=) = .hdr.scan ;
+ HDRSCAN on $(<:D=) = "^[ ]*#[ ]*include[ ]*([<\"][^\">]*[\">]).*$" ;
+}
+rule .hdr.scan
+{
+ local hdrs = [ GLOB . : $(>:D=) ] ;
+ INCLUDES $(<:D=) : $(hdrs:D=) ;
+ HDRRULE on $(>:D=) = .hdr.scan ;
+ HDRSCAN on $(>:D=) = "^[ ]*#[ ]*include[ ]*([<\"][^\">]*[\">]).*$" ;
+}
+.scan [ GLOB . : *.c ] ;
+
+# Distribution making from here on out. Assumes that
+# the docs are already built as html at ../doc/html. If
+# they aren't, then the docs are not included in the dist
+# archive.
+dist.license =
+ [ GLOB . : $(LICENSE).txt ]
+ ;
+dist.license = $(dist.license:D=)
+ [ GLOB [ .path .. .. .. ] : $(LICENSE).txt ]
+ [ GLOB [ .path .. boost ] : $(LICENSE).txt ] ;
+dist.docs =
+ [ GLOB . : *.png *.css *.html ]
+ ;
+dist.docs = $(dist.docs:D=)
+ [ GLOB [ .path images ] : *.png ]
+ [ GLOB [ .path jam ] : *.html ]
+ ;
+dist.source =
+ [ GLOB . : *.c *.h ]
+ ;
+dist.source = $(dist.source:D=)
+ $(dist.license[1])
+ $(dist.docs)
+ build.jam build.bat build.sh build_vms.com
+ Jambase
+ jamgram.y jamgram.yy
+ [ .path modules set.c ]
+ [ .path modules path.c ]
+ [ .path modules regex.c ]
+ [ .path modules property-set.c ]
+ [ .path modules sequence.c ]
+ [ .path modules order.c ]
+ [ GLOB [ .path boehm_gc ] : * ]
+ [ GLOB [ .path boehm_gc include ] : * ]
+ [ GLOB [ .path boehm_gc include private ] : * ]
+ [ GLOB [ .path boehm_gc cord ] : * ]
+ [ GLOB [ .path boehm_gc Mac_files ] : * ]
+ [ GLOB [ .path boehm_gc tests ] : * ]
+ [ GLOB [ .path boehm_gc doc ] : * ]
+ ;
+dist.bin =
+ bjam
+ ;
+dist.bin =
+ $(dist.license[1])
+ $(dist.bin:S=$(bjam.exe:S))
+ ;
+
+if $(OS) = NT
+{
+ zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7z.exe" ] ;
+ zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7zn.exe" ] ;
+ zip ?= [ GLOB $(PATH) : zip.exe ] ;
+ zip ?= zip ;
+ zip = $(zip[1]) ;
+ switch $(zip:D=:S=)
+ {
+ case 7z* : zip += a -r -tzip -mx=9 ;
+ case zip : zip += -9r ;
+ }
+ actions piecemeal [PACK] {
+ "$(zip)" "$(<)" "$(>)"
+ }
+ actions piecemeal [ZIP] {
+ "$(zip)" "$(<)" "$(>)"
+ }
+ actions piecemeal [COPY] {
+ copy /Y "$(>)" "$(<)" >NUL:
+ }
+}
+if $(UNIX) = true
+{
+ tar ?= [ GLOB $(PATH) : star bsdtar tar ] ;
+ tar = $(tar[1]) ;
+ switch $(tar:D=:S=)
+ {
+ case star : tar += -c artype=pax -D -d -to-stdout ;
+ case * : tar += -c -f - ;
+ }
+ actions [PACK] {
+ "$(tar)" "$(>)" | gzip -c9 > "$(<)"
+ }
+ #~ actions [PACK] {
+ #~ tar cf "$(<:S=.tar)" "$(>)"
+ #~ }
+ actions [ZIP] {
+ gzip -c9 "$(>)" > "$(<)"
+ }
+ actions [COPY] {
+ cp -Rpf "$(>)" "$(<)"
+ }
+}
+
+# The single binary, compressed.
+rule .binary
+{
+ local zip = ;
+ if $(OS) = NT { zip = $($(<).exe:S=.zip) ; }
+ if $(UNIX) = true { zip = $($(<).exe:S=.tgz) ; }
+ zip = $(zip:S=)-$(VERSION)-$(RELEASE)-$(platform)$(zip:S) ;
+ DEPENDS $(zip) : $($(<).exe) ;
+ DEPENDS dist : $(zip) ;
+ #~ LOCATE on $(zip) = $(locate-target) ;
+ if $(OS) = NT { [ZIP] $(zip) : $($(<).exe) ; }
+ if $(UNIX) = true { [PACK] $(zip) : $($(<).exe) ; }
+ .clean $(zip) ;
+}
+
+# Package some file.
+rule .package ( dst-dir : src-files + )
+{
+ local dst-files ;
+ local src-files-actual ;
+ for local src-path in $(src-files)
+ {
+ if ! [ GLOB $(src-path:P) : $(src-path:B) ] || [ CHECK_IF_FILE $(src-path) ]
+ {
+ local src-subdir = $(src-path:D) ;
+ local src-file = $(src-path) ;
+ while $(src-subdir:D) { src-subdir = $(src-subdir:D) ; }
+ if $(src-subdir) = ".."
+ {
+ src-file = $(src-file:D=) ;
+ }
+ dst-files += $(src-file:R=$(dst-dir)) ;
+ src-files-actual += $(src-path) ;
+ }
+ }
+
+ local pack = ;
+ if $(OS) = NT { pack = $(dst-dir).zip ; }
+ if $(UNIX) = true { pack = $(dst-dir).tgz ; }
+
+ DEPENDS dist : $(pack) ;
+ DEPENDS $(pack) : $(dst-files) ;
+
+ local dst-files-queue = $(dst-files) ;
+ for local src-path in $(src-files-actual)
+ {
+ local dst-file = $(dst-files-queue[1]) ;
+ dst-files-queue = $(dst-files-queue[2-]) ;
+ DEPENDS $(dst-file) : $(src-path) $(dst-file:D) ;
+ .mkdir $(dst-file:D) ;
+
+ [COPY] $(dst-file) : $(src-path) ;
+ .clean $(dst-file) ;
+ }
+
+ [PACK] $(pack) : $(dst-files) ;
+ .clean $(pack) ;
+}
+
+# RPM distro file.
+rpm-tool = [ GLOB $(PATH) : "rpmbuild" ] ;
+rpm-tool ?= [ GLOB $(PATH) : "rpm" ] ;
+rpm-tool = $(rpm-tool[1]) ;
+rule .rpm ( name : source )
+{
+ local rpm-arch = ;
+ switch $(OSPLAT)
+ {
+ case X86 : rpm-arch ?= i386 ;
+ case PPC : rpm-arch ?= ppc ;
+ case AXP : rpm-arch ?= alpha ;
+ # no guarantee for these:
+ case IA64 : rpm-arch ?= ia64 ;
+ case ARM : rpm-arch ?= arm ;
+ case SPARC : rpm-arch ?= sparc ;
+ case * : rpm-arch ?= other ;
+ }
+ local target = $(name)-rpm ;
+ NOTFILE $(target) ;
+ DEPENDS dist : $(target) ;
+ DEPENDS $(target) : $(name).$(rpm-arch).rpm $(name).src.rpm ;
+ DEPENDS $(name).$(rpm-arch).rpm : $(source) ;
+ DEPENDS $(name).src.rpm : $(name).$(rpm-arch).rpm ;
+ docs on $(target) = $(dist.docs:J=" ") ;
+ arch on $(target) = $(rpm-arch) ;
+ if $(rpm-arch) = ppc { target-opt on $(target) = --target= ; }
+ else { target-opt on $(target) = "--target " ; }
+ [RPM] $(target) : $(source) ;
+ .clean $(name).$(rpm-arch).rpm $(name).src.rpm ;
+}
+actions [RPM] {
+ set -e
+ export BOOST_JAM_TOOLSET="$(toolset)"
+ $(rpm-tool) -ta $(target-opt)$(arch) $(>) | tee rpm.out
+ cp `grep -e '^Wrote:' rpm.out | sed 's/^Wrote: //'` .
+ rm -f rpm.out
+}
+
+# The distribution targets. Don't bother with the targets if a
+# distribution build is not requested.
+if dist in $(ARGV)
+{
+ #~ .binary bjam ;
+ .package $(NAME)-$(VERSION) : $(dist.source) ;
+ .package $(NAME)-$(VERSION)-$(RELEASE)-$(platform) : $(dist.bin) ;
+ if $(rpm-tool)
+ {
+ #~ .rpm $(NAME)-$(VERSION)-$(RELEASE) : $(NAME)-$(VERSION).tgz ;
+ }
+}
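
build.jam is never invoked by hand in the normal flow; the bootstrap jam0 produced by build.bat, build.sh, or build_vms.com runs it with --toolset and --toolset-root arguments. For illustration, assuming a jam0 already exists in ./bootstrap and the gcc toolset applies, a direct invocation might look like:

    # Build debug binaries, then the distribution archives (illustrative values).
    ./bootstrap/jam0 -f build.jam --toolset=gcc "--toolset-root=" --debug
    ./bootstrap/jam0 -f build.jam --toolset=gcc "--toolset-root=" dist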
diff --git a/jam-files/engine/build.sh b/jam-files/engine/build.sh
new file mode 100755
index 000000000..f1fb806d3
--- /dev/null
+++ b/jam-files/engine/build.sh
@@ -0,0 +1,303 @@
+#!/bin/sh
+
+#~ Copyright 2002-2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Reset the toolset.
+BOOST_JAM_TOOLSET=
+
+# Run a command, and echo before doing so. Also checks the exit
+# status and quits if there was an error.
+echo_run ()
+{
+ echo "$@"
+ $@
+ r=$?
+ if test $r -ne 0 ; then
+ exit $r
+ fi
+}
+
+# Print an error message, and exit with a status of 1.
+error_exit ()
+{
+ echo "###"
+ echo "###" "$@"
+ echo "###"
+ echo "### You can specify the toolset as the argument, i.e.:"
+ echo "### ./build.sh gcc"
+ echo "###"
+ echo "### Toolsets supported by this script are:"
+ echo "### acc, como, darwin, gcc, intel-darwin, intel-linux, kcc, kylix,"
+ echo "### mipspro, mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp"
+ echo "###"
+ echo "### A special toolset; cc, is available which is used as a fallback"
+ echo "### when a more specific toolset is not found and the cc command is"
+ echo "### detected. The 'cc' toolset will use the CC, CFLAGS, and LIBS"
+ echo "### envrironment variables, if present."
+ echo "###"
+ exit 1
+}
+
+# Check that a command is in the PATH.
+test_path ()
+{
+ if `command -v command 1>/dev/null 2>/dev/null`; then
+ command -v $1 1>/dev/null 2>/dev/null
+ else
+ hash $1 1>/dev/null 2>/dev/null
+ fi
+}
+
+# Check that the OS name, as returned by "uname", is as given.
+test_uname ()
+{
+ if test_path uname; then
+ test `uname` = $*
+ fi
+}
+
+# Try and guess the toolset to bootstrap the build with...
+Guess_Toolset ()
+{
+ if test -r /mingw/bin/gcc ; then
+ BOOST_JAM_TOOLSET=mingw
+ BOOST_JAM_TOOLSET_ROOT=/mingw/
+ elif test_uname Darwin ; then BOOST_JAM_TOOLSET=darwin
+ elif test_uname IRIX ; then BOOST_JAM_TOOLSET=mipspro
+ elif test_uname IRIX64 ; then BOOST_JAM_TOOLSET=mipspro
+ elif test_uname OSF1 ; then BOOST_JAM_TOOLSET=tru64cxx
+ elif test_uname QNX && test_path qcc ; then BOOST_JAM_TOOLSET=qcc
+ elif test_path gcc ; then BOOST_JAM_TOOLSET=gcc
+ elif test_path icc ; then BOOST_JAM_TOOLSET=intel-linux
+ elif test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0
+ elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80
+ elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
+ elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
+ elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
+ elif test_path pgcc ; then BOOST_JAM_TOOLSET=pgi
+ elif test_path pathcc ; then BOOST_JAM_TOOLSET=pathscale
+ elif test_path xlc ; then BOOST_JAM_TOOLSET=vacpp
+ elif test_path como ; then BOOST_JAM_TOOLSET=como
+ elif test_path KCC ; then BOOST_JAM_TOOLSET=kcc
+ elif test_path bc++ ; then BOOST_JAM_TOOLSET=kylix
+ elif test_path aCC ; then BOOST_JAM_TOOLSET=acc
+ elif test_uname HP-UX ; then BOOST_JAM_TOOLSET=acc
+ elif test -r /opt/SUNWspro/bin/cc ; then
+ BOOST_JAM_TOOLSET=sunpro
+ BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
+ # Test for "cc" as the default fallback.
+ elif test_path $CC ; then BOOST_JAM_TOOLSET=cc
+ elif test_path cc ; then
+ BOOST_JAM_TOOLSET=cc
+ CC=cc
+ fi
+ if test "$BOOST_JAM_TOOLSET" = "" ; then
+ error_exit "Could not find a suitable toolset."
+ fi
+}
+
+# The one option we support in the invocation
+# is the name of the toolset to force building
+# with.
+case "$1" in
+ --guess-toolset) Guess_Toolset ; echo "$BOOST_JAM_TOOLSET" ; exit 1 ;;
+ -*) Guess_Toolset ;;
+ ?*) BOOST_JAM_TOOLSET=$1 ; shift ;;
+ *) Guess_Toolset ;;
+esac
+BOOST_JAM_OPT_JAM="-o bootstrap/jam0"
+BOOST_JAM_OPT_MKJAMBASE="-o bootstrap/mkjambase0"
+BOOST_JAM_OPT_YYACC="-o bootstrap/yyacc0"
+case $BOOST_JAM_TOOLSET in
+ mingw)
+ if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/gcc ; then
+ export PATH=${BOOST_JAM_TOOLSET_ROOT}bin:$PATH
+ fi
+ BOOST_JAM_CC="gcc -DNT"
+ ;;
+
+ gcc)
+ BOOST_JAM_CC=gcc
+ ;;
+
+ darwin)
+ BOOST_JAM_CC=cc
+ ;;
+
+ intel-darwin)
+ BOOST_JAM_CC=icc
+ ;;
+
+ intel-linux)
+ if test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0/
+ elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80/
+ elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
+ elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
+ elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
+ fi
+ if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh ; then
+ # iccvars doesn't change LD_RUN_PATH. We adjust LD_RUN_PATH
+ # here in order not to have to rely on ld.so.conf knowing the
+ # icc library directory. We do this before running iccvars.sh
+ # in order to allow a user to add modifications to LD_RUN_PATH
+ # in iccvars.sh.
+ if test -z "${LD_RUN_PATH}"; then
+ LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib"
+ else
+ LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib:${LD_RUN_PATH}"
+ fi
+ export LD_RUN_PATH
+ . ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh
+ fi
+ BOOST_JAM_CC=icc
+ ;;
+
+ vacpp)
+ BOOST_JAM_CC=xlc
+ ;;
+
+ como)
+ BOOST_JAM_CC="como --c"
+ ;;
+
+ kcc)
+ BOOST_JAM_CC=KCC
+ ;;
+
+ kylix)
+ BOOST_JAM_CC=bc++
+ ;;
+
+ mipspro)
+ BOOST_JAM_CC=cc
+ ;;
+
+ pathscale)
+ BOOST_JAM_CC=pathcc
+ ;;
+
+ pgi)
+ BOOST_JAM_CC=pgcc
+ ;;
+
+ sun*)
+ if test -z "${BOOST_JAM_TOOLSET_ROOT}" -a -r /opt/SUNWspro/bin/cc ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
+ fi
+ if test -r "${BOOST_JAM_TOOLSET_ROOT}bin/cc" ; then
+ PATH=${BOOST_JAM_TOOLSET_ROOT}bin:${PATH}
+ export PATH
+ fi
+ BOOST_JAM_CC=cc
+ ;;
+
+ clang*)
+ BOOST_JAM_CC="clang -Wno-unused -Wno-format"
+ BOOST_JAM_TOOLSET=clang
+ ;;
+
+ tru64cxx)
+ BOOST_JAM_CC=cc
+ ;;
+
+ acc)
+ BOOST_JAM_CC="cc -Ae"
+ ;;
+
+ cc)
+ if test -z "$CC" ; then CC=cc ; fi
+ BOOST_JAM_CC=$CC
+ BOOST_JAM_OPT_JAM="$BOOST_JAM_OPT_JAM $CFLAGS $LIBS"
+ BOOST_JAM_OPT_MKJAMBASE="$BOOST_JAM_OPT_MKJAMBASE $CFLAGS $LIBS"
+ BOOST_JAM_OPT_YYACC="$BOOST_JAM_OPT_YYACC $CFLAGS $LIBS"
+ ;;
+
+ qcc)
+ BOOST_JAM_CC=qcc
+ ;;
+
+ *)
+ error_exit "Unknown toolset: $BOOST_JAM_TOOLSET"
+ ;;
+esac
+
+echo "###"
+echo "### Using '$BOOST_JAM_TOOLSET' toolset."
+echo "###"
+
+YYACC_SOURCES="yyacc.c"
+MKJAMBASE_SOURCES="mkjambase.c"
+BJAM_SOURCES="\
+ command.c compile.c debug.c expand.c glob.c hash.c\
+ hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c\
+ newstr.c option.c output.c parse.c pathunix.c pathvms.c regexp.c\
+ rules.c scan.c search.c subst.c timestamp.c variable.c modules.c\
+ strings.c filesys.c builtins.c pwd.c class.c native.c md5.c w32_getreg.c\
+ modules/set.c modules/path.c modules/regex.c modules/property-set.c\
+ modules/sequence.c modules/order.c"
+case $BOOST_JAM_TOOLSET in
+ mingw)
+ BJAM_SOURCES="${BJAM_SOURCES} execnt.c filent.c"
+ ;;
+
+ *)
+ BJAM_SOURCES="${BJAM_SOURCES} execunix.c fileunix.c"
+ ;;
+esac
+
+BJAM_UPDATE=
+if test "$1" = "--update" -o "$2" = "--update" -o "$3" = "--update" -o "$4" = "--update" ; then
+ BJAM_UPDATE="update"
+fi
+if test "${BJAM_UPDATE}" = "update" -a ! -x "./bootstrap/jam0" ; then
+ BJAM_UPDATE=
+fi
+
+if test "${BJAM_UPDATE}" != "update" ; then
+ echo_run rm -rf bootstrap
+ echo_run mkdir bootstrap
+ if test ! -r jamgram.y -o ! -r jamgramtab.h ; then
+ echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_YYACC} ${YYACC_SOURCES}
+ if test -x "./bootstrap/yyacc0" ; then
+ echo_run ./bootstrap/yyacc0 jamgram.y jamgramtab.h jamgram.yy
+ fi
+ fi
+ if test ! -r jamgram.c -o ! -r jamgram.h ; then
+ if test_path yacc ; then YACC="yacc -d"
+ elif test_path bison ; then YACC="bison -y -d --yacc"
+ fi
+ echo_run $YACC jamgram.y
+ mv -f y.tab.c jamgram.c
+ mv -f y.tab.h jamgram.h
+ fi
+ if test ! -r jambase.c ; then
+ echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_MKJAMBASE} ${MKJAMBASE_SOURCES}
+ if test -x "./bootstrap/mkjambase0" ; then
+ echo_run ./bootstrap/mkjambase0 jambase.c Jambase
+ fi
+ fi
+ echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_JAM} ${BJAM_SOURCES}
+fi
+if test -x "./bootstrap/jam0" ; then
+ if test "${BJAM_UPDATE}" != "update" ; then
+ echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" clean
+ fi
+ echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" "$@"
+fi
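
Typical invocations of build.sh, using only the options the script itself recognises:

    ./build.sh                   # guess a toolset, bootstrap jam0, build bjam
    ./build.sh gcc               # force the gcc toolset
    ./build.sh gcc --update      # reuse ./bootstrap/jam0 if it already exists
    ./build.sh --guess-toolset   # print the guessed toolset and exit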
diff --git a/jam-files/engine/build_vms.com b/jam-files/engine/build_vms.com
new file mode 100644
index 000000000..965b63424
--- /dev/null
+++ b/jam-files/engine/build_vms.com
@@ -0,0 +1,105 @@
+$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson.
+$ ! Distributed under the Boost Software License, Version 1.0.
+$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+$ !
+$ ! bootstrap build script for Jam
+$ !
+$ SAY :== WRITE SYS$OUTPUT
+$ !
+$ ON WARNING THEN CONTINUE
+$ !
+$ IF "" .NES. F$SEARCH("[.bootstrap_vms]*.*")
+$ THEN
+$ SAY "Cleaning previous boostrap files..."
+$ !
+$ SET FILE/PROTECTION=(S:RWED) [.bootstrap_vms]*.*;*
+$ DELETE [.bootstrap_vms]*.*;*
+$ ENDIF
+$ !
+$ IF "" .NES. F$SEARCH("bootstrap_vms.dir")
+$ THEN
+$ SAY "Removing previous boostrap directory..."
+$ !
+$ SET FILE/PROT=(S:RWED) bootstrap_vms.dir
+$ DELETE bootstrap_vms.dir;
+$ ENDIF
+$ !
+$ SAY "Creating boostrap directory..."
+$ !
+$ CREATE/DIR [.bootstrap_vms]
+$ !
+$ SAY "Building bootstrap jam..."
+$ !
+$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC /PREFIX_LIBRARY_ENTRIES=ALL_ENTRIES "
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]builtins.obj builtins.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]command.obj command.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]compile.obj compile.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]execvms.obj execvms.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]expand.obj expand.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]filesys.obj filesys.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]filevms.obj filevms.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]glob.obj glob.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]hash.obj hash.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]hdrmacro.obj hdrmacro.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]headers.obj headers.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]jam.obj jam.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]jambase.obj jambase.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]jamgram.obj jamgram.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]lists.obj lists.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]make.obj make.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]make1.obj make1.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]modules.obj modules.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]newstr.obj newstr.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]option.obj option.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]parse.obj parse.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]pathvms.obj pathvms.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]pwd.obj pwd.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]regexp.obj regexp.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]rules.obj rules.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]scan.obj scan.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]search.obj search.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]strings.obj strings.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]subst.obj subst.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]timestamp.obj timestamp.c
+$ cc 'CC_FLAGS /OBJECT=[.bootstrap_vms]variable.obj variable.c
+$ link -
+ /EXECUTABLE=[.bootstrap_vms]jam0.exe -
+ [.bootstrap_vms]builtins.obj, -
+ [.bootstrap_vms]command.obj, -
+ [.bootstrap_vms]compile.obj, -
+ [.bootstrap_vms]execvms.obj, -
+ [.bootstrap_vms]expand.obj, -
+ [.bootstrap_vms]filesys.obj, -
+ [.bootstrap_vms]filevms.obj, -
+ [.bootstrap_vms]glob.obj, -
+ [.bootstrap_vms]hash.obj, -
+ [.bootstrap_vms]hdrmacro.obj, -
+ [.bootstrap_vms]headers.obj, -
+ [.bootstrap_vms]jam.obj, -
+ [.bootstrap_vms]jambase.obj, -
+ [.bootstrap_vms]jamgram.obj, -
+ [.bootstrap_vms]lists.obj, -
+ [.bootstrap_vms]make.obj, -
+ [.bootstrap_vms]make1.obj, -
+ [.bootstrap_vms]modules.obj, -
+ [.bootstrap_vms]newstr.obj, -
+ [.bootstrap_vms]option.obj, -
+ [.bootstrap_vms]parse.obj, -
+ [.bootstrap_vms]pathvms.obj, -
+ [.bootstrap_vms]pwd.obj, -
+ [.bootstrap_vms]regexp.obj, -
+ [.bootstrap_vms]rules.obj, -
+ [.bootstrap_vms]scan.obj, -
+ [.bootstrap_vms]search.obj, -
+ [.bootstrap_vms]strings.obj, -
+ [.bootstrap_vms]subst.obj, -
+ [.bootstrap_vms]timestamp.obj, -
+ [.bootstrap_vms]variable.obj
+$ !
+$ SAY "Cleaning any previous build..."
+$ !
+$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset=vmsdecc clean
+$ !
+$ SAY "Building Boost.Jam..."
+$ !
+$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset=vmsdecc
diff --git a/jam-files/engine/builtins.c b/jam-files/engine/builtins.c
new file mode 100644
index 000000000..b28a484ec
--- /dev/null
+++ b/jam-files/engine/builtins.c
@@ -0,0 +1,2310 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "builtins.h"
+#include "rules.h"
+#include "filesys.h"
+#include "newstr.h"
+#include "regexp.h"
+#include "frames.h"
+#include "hash.h"
+#include "strings.h"
+#include "pwd.h"
+#include "pathsys.h"
+#include "make.h"
+#include "hdrmacro.h"
+#include "compile.h"
+#include "native.h"
+#include "variable.h"
+#include "timestamp.h"
+#include "md5.h"
+#include <ctype.h>
+
+#if defined(USE_EXECUNIX)
+# include <sys/types.h>
+# include <sys/wait.h>
+#else
+/*
+ NT does not have wait() and associated macros; it uses the return value
+ of system() instead. Status code groups are documented at
+ http://msdn.microsoft.com/en-gb/library/ff565436.aspx
+*/
+# define WIFEXITED(w) (((w) & 0XFFFFFF00) == 0)
+# define WEXITSTATUS(w)(w)
+#endif
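+
+/* A minimal illustrative sketch of how the WIFEXITED()/WEXITSTATUS() macros
+ * above are typically used to decode the raw status returned by system() or
+ * pclose() (builtin_shell() at the end of this file does the same thing).
+ * decode_exit_status() is a hypothetical helper shown only for illustration.
+ */
+static int decode_exit_status( int raw_status )
+{
+    /* A normal exit yields the child's exit code; anything else (a signal,
+     * a crash, ...) is reported as -1.
+     */
+    return WIFEXITED( raw_status ) ? WEXITSTATUS( raw_status ) : -1;
+}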
+
+/*
+ * builtins.c - builtin jam rules
+ *
+ * External routines:
+ *
+ * load_builtin() - define builtin rules
+ *
+ * Internal routines:
+ *
+ * builtin_depends() - DEPENDS/INCLUDES rule.
+ * builtin_echo() - ECHO rule.
+ * builtin_exit() - EXIT rule.
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule.
+ * builtin_glob() - GLOB rule.
+ * builtin_match() - MATCH rule.
+ *
+ * 01/10/01 (seiwald) - split from compile.c
+ */
+
+
+/*
+ * compile_builtin() - define builtin rules
+ */
+
+#define P0 (PARSE *)0
+#define C0 (char *)0
+
+#if defined( OS_NT ) || defined( OS_CYGWIN )
+ LIST * builtin_system_registry ( PARSE *, FRAME * );
+ LIST * builtin_system_registry_names( PARSE *, FRAME * );
+#endif
+
+int glob( char * s, char * c );
+
+void backtrace ( FRAME * );
+void backtrace_line ( FRAME * );
+void print_source_line( PARSE * );
+
+
+RULE * bind_builtin( char * name, LIST * (* f)( PARSE *, FRAME * ), int flags, char * * args )
+{
+ argument_list* arg_list = 0;
+
+ if ( args )
+ {
+ arg_list = args_new();
+ lol_build( arg_list->data, args );
+ }
+
+ return new_rule_body( root_module(), name, arg_list,
+ parse_make( f, P0, P0, P0, C0, C0, flags ), 1 );
+}
+
+
+RULE * duplicate_rule( char * name, RULE * other )
+{
+ return import_rule( other, root_module(), name );
+}
+
+
+void load_builtins()
+{
+ duplicate_rule( "Always",
+ bind_builtin( "ALWAYS",
+ builtin_flags, T_FLAG_TOUCHED, 0 ) );
+
+ duplicate_rule( "Depends",
+ bind_builtin( "DEPENDS",
+ builtin_depends, 0, 0 ) );
+
+ duplicate_rule( "echo",
+ duplicate_rule( "Echo",
+ bind_builtin( "ECHO",
+ builtin_echo, 0, 0 ) ) );
+
+ {
+ char * args[] = { "message", "*", ":", "result-value", "?", 0 };
+ duplicate_rule( "exit",
+ duplicate_rule( "Exit",
+ bind_builtin( "EXIT",
+ builtin_exit, 0, args ) ) );
+ }
+
+ {
+ char * args[] = { "directories", "*", ":", "patterns", "*", ":", "case-insensitive", "?", 0 };
+ duplicate_rule( "Glob",
+ bind_builtin( "GLOB", builtin_glob, 0, args ) );
+ }
+
+ {
+ char * args[] = { "patterns", "*", 0 };
+ bind_builtin( "GLOB-RECURSIVELY",
+ builtin_glob_recursive, 0, args );
+ }
+
+ duplicate_rule( "Includes",
+ bind_builtin( "INCLUDES",
+ builtin_depends, 1, 0 ) );
+
+ {
+ char * args[] = { "targets", "*", ":", "targets-to-rebuild", "*", 0 };
+ bind_builtin( "REBUILDS",
+ builtin_rebuilds, 0, args );
+ }
+
+ duplicate_rule( "Leaves",
+ bind_builtin( "LEAVES",
+ builtin_flags, T_FLAG_LEAVES, 0 ) );
+
+ duplicate_rule( "Match",
+ bind_builtin( "MATCH",
+ builtin_match, 0, 0 ) );
+
+ {
+ char * args[] = { "string", ":", "delimiters" };
+ bind_builtin( "SPLIT_BY_CHARACTERS",
+ builtin_split_by_characters, 0, 0 );
+ }
+
+ duplicate_rule( "NoCare",
+ bind_builtin( "NOCARE",
+ builtin_flags, T_FLAG_NOCARE, 0 ) );
+
+ duplicate_rule( "NOTIME",
+ duplicate_rule( "NotFile",
+ bind_builtin( "NOTFILE",
+ builtin_flags, T_FLAG_NOTFILE, 0 ) ) );
+
+ duplicate_rule( "NoUpdate",
+ bind_builtin( "NOUPDATE",
+ builtin_flags, T_FLAG_NOUPDATE, 0 ) );
+
+ duplicate_rule( "Temporary",
+ bind_builtin( "TEMPORARY",
+ builtin_flags, T_FLAG_TEMP, 0 ) );
+
+ bind_builtin( "ISFILE",
+ builtin_flags, T_FLAG_ISFILE, 0 );
+
+ duplicate_rule( "HdrMacro",
+ bind_builtin( "HDRMACRO",
+ builtin_hdrmacro, 0, 0 ) );
+
+ /* FAIL_EXPECTED is used to indicate that the result of a target build
+ * action should be inverted (ok <=> fail) this can be useful when
+ * performing test runs from Jamfiles.
+ */
+ bind_builtin( "FAIL_EXPECTED",
+ builtin_flags, T_FLAG_FAIL_EXPECTED, 0 );
+
+ bind_builtin( "RMOLD",
+ builtin_flags, T_FLAG_RMOLD, 0 );
+
+ {
+ char * args[] = { "targets", "*", 0 };
+ bind_builtin( "UPDATE",
+ builtin_update, 0, args );
+ }
+
+ {
+ char * args[] = { "targets", "*",
+ ":", "log", "?",
+ ":", "ignore-minus-n", "?",
+ ":", "ignore-minus-q", "?", 0 };
+ bind_builtin( "UPDATE_NOW",
+ builtin_update_now, 0, args );
+ }
+
+ {
+ char * args[] = { "string", "pattern", "replacements", "+", 0 };
+ duplicate_rule( "subst",
+ bind_builtin( "SUBST",
+ builtin_subst, 0, args ) );
+ }
+
+ {
+ char * args[] = { "module", "?", 0 };
+ bind_builtin( "RULENAMES",
+ builtin_rulenames, 0, args );
+ }
+
+
+ {
+ char * args[] = { "module", "?", 0 };
+ bind_builtin( "VARNAMES",
+ builtin_varnames, 0, args );
+ }
+
+ {
+ char * args[] = { "module", "?", 0 };
+ bind_builtin( "DELETE_MODULE",
+ builtin_delete_module, 0, args );
+ }
+
+ {
+ char * args[] = { "source_module", "?",
+ ":", "source_rules", "*",
+ ":", "target_module", "?",
+ ":", "target_rules", "*",
+ ":", "localize", "?", 0 };
+ bind_builtin( "IMPORT",
+ builtin_import, 0, args );
+ }
+
+ {
+ char * args[] = { "module", "?", ":", "rules", "*", 0 };
+ bind_builtin( "EXPORT",
+ builtin_export, 0, args );
+ }
+
+ {
+ char * args[] = { "levels", "?", 0 };
+ bind_builtin( "CALLER_MODULE",
+ builtin_caller_module, 0, args );
+ }
+
+ {
+ char * args[] = { "levels", "?", 0 };
+ bind_builtin( "BACKTRACE",
+ builtin_backtrace, 0, args );
+ }
+
+ {
+ char * args[] = { 0 };
+ bind_builtin( "PWD",
+ builtin_pwd, 0, args );
+ }
+
+ {
+ char * args[] = { "target", "*", ":", "path", "*", 0 };
+ bind_builtin( "SEARCH_FOR_TARGET",
+ builtin_search_for_target, 0, args );
+ }
+
+ {
+ char * args[] = { "modules_to_import", "+", ":", "target_module", "?", 0 };
+ bind_builtin( "IMPORT_MODULE",
+ builtin_import_module, 0, args );
+ }
+
+ {
+ char * args[] = { "module", "?", 0 };
+ bind_builtin( "IMPORTED_MODULES",
+ builtin_imported_modules, 0, args );
+ }
+
+ {
+ char * args[] = { "instance_module", ":", "class_module", 0 };
+ bind_builtin( "INSTANCE",
+ builtin_instance, 0, args );
+ }
+
+ {
+ char * args[] = { "sequence", "*", 0 };
+ bind_builtin( "SORT",
+ builtin_sort, 0, args );
+ }
+
+ {
+ char * args[] = { "path_parts", "*", 0 };
+ bind_builtin( "NORMALIZE_PATH",
+ builtin_normalize_path, 0, args );
+ }
+
+ {
+ char * args[] = { "args", "*", 0 };
+ bind_builtin( "CALC",
+ builtin_calc, 0, args );
+ }
+
+ {
+ char * args[] = { "module", ":", "rule", 0 };
+ bind_builtin( "NATIVE_RULE",
+ builtin_native_rule, 0, args );
+ }
+
+ {
+ char * args[] = { "module", ":", "rule", ":", "version", 0 };
+ bind_builtin( "HAS_NATIVE_RULE",
+ builtin_has_native_rule, 0, args );
+ }
+
+ {
+ char * args[] = { "module", "*", 0 };
+ bind_builtin( "USER_MODULE",
+ builtin_user_module, 0, args );
+ }
+
+ {
+ char * args[] = { 0 };
+ bind_builtin( "NEAREST_USER_LOCATION",
+ builtin_nearest_user_location, 0, args );
+ }
+
+ {
+ char * args[] = { "file", 0 };
+ bind_builtin( "CHECK_IF_FILE",
+ builtin_check_if_file, 0, args );
+ }
+
+#ifdef HAVE_PYTHON
+ {
+ char * args[] = { "python-module", ":", "function", ":",
+ "jam-module", ":", "rule-name", 0 };
+ bind_builtin( "PYTHON_IMPORT_RULE",
+ builtin_python_import_rule, 0, args );
+ }
+#endif
+
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ {
+ char * args[] = { "key_path", ":", "data", "?", 0 };
+ bind_builtin( "W32_GETREG",
+ builtin_system_registry, 0, args );
+ }
+
+ {
+ char * args[] = { "key_path", ":", "result-type", 0 };
+ bind_builtin( "W32_GETREGNAMES",
+ builtin_system_registry_names, 0, args );
+ }
+# endif
+
+ {
+ char * args[] = { "command", ":", "*", 0 };
+ duplicate_rule( "SHELL",
+ bind_builtin( "COMMAND",
+ builtin_shell, 0, args ) );
+ }
+
+ {
+ char * args[] = { "string", 0 };
+ bind_builtin( "MD5",
+ builtin_md5, 0, args ) ;
+ }
+
+ {
+ char * args[] = { "name", ":", "mode", 0 };
+ bind_builtin( "FILE_OPEN",
+ builtin_file_open, 0, args );
+ }
+
+ {
+ char * args[] = { "string", ":", "width", 0 };
+ bind_builtin( "PAD",
+ builtin_pad, 0, args );
+ }
+
+ {
+ char * args[] = { "targets", "*", 0 };
+ bind_builtin( "PRECIOUS",
+ builtin_precious, 0, args );
+ }
+
+ {
+ char * args [] = { 0 };
+ bind_builtin( "SELF_PATH", builtin_self_path, 0, args );
+ }
+
+ {
+ char * args [] = { "path", 0 };
+ bind_builtin( "MAKEDIR", builtin_makedir, 0, args );
+ }
+
+ /* Initialize builtin modules. */
+ init_set();
+ init_path();
+ init_regex();
+ init_property_set();
+ init_sequence();
+ init_order();
+}
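+
+/* A minimal illustrative sketch of the shape of a builtin that could be
+ * registered through bind_builtin() above. builtin_noop is hypothetical and
+ * is not registered anywhere; it only shows the PARSE/FRAME calling
+ * convention shared by every builtin in this file.
+ */
+static LIST * builtin_noop( PARSE * parse, FRAME * frame )
+{
+    /* Arguments, if any, would be fetched with lol_get( frame->args, n ).
+     * Returning L0 means "no result" to the Jam interpreter.
+     */
+    return L0;
+}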
+
+
+/*
+ * builtin_calc() - CALC rule.
+ *
+ * The CALC rule performs simple mathematical operations on two arguments.
+ */
+
+LIST * builtin_calc( PARSE * parse, FRAME * frame )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+
+ LIST * result = 0;
+ long lhs_value;
+ long rhs_value;
+ long result_value;
+ char buffer [ 16 ];
+ char const * lhs;
+ char const * op;
+ char const * rhs;
+
+ if ( arg == 0 ) return L0;
+ lhs = arg->string;
+
+ arg = list_next( arg );
+ if ( arg == 0 ) return L0;
+ op = arg->string;
+
+ arg = list_next( arg );
+ if ( arg == 0 ) return L0;
+ rhs = arg->string;
+
+ lhs_value = atoi( lhs );
+ rhs_value = atoi( rhs );
+
+ if ( strcmp( "+", op ) == 0 )
+ {
+ result_value = lhs_value + rhs_value;
+ }
+ else if ( strcmp( "-", op ) == 0 )
+ {
+ result_value = lhs_value - rhs_value;
+ }
+ else
+ {
+ return L0;
+ }
+
+ sprintf( buffer, "%ld", result_value );
+ result = list_new( result, newstr( buffer ) );
+ return result;
+}
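+
+/* An illustrative sketch, not used by Jam itself: driving builtin_calc()
+ * directly from C, roughly the way the Jam expression [ CALC 1 + 2 ] would.
+ * calc_example() is a hypothetical helper; it builds one argument list
+ * holding "1", "+" and "2" and should yield the single-element list { "3" }.
+ */
+static LIST * calc_example( void )
+{
+    FRAME frame[ 1 ];
+    LIST * result;
+
+    frame_init( frame );
+    lol_add( frame->args,
+        list_new( list_new( list_new( L0, newstr( "1" ) ),
+            newstr( "+" ) ), newstr( "2" ) ) );
+
+    result = builtin_calc( P0, frame );
+
+    frame_free( frame );
+    return result;
+}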
+
+
+/*
+ * builtin_depends() - DEPENDS/INCLUDES rule.
+ *
+ * The DEPENDS/INCLUDES builtin rule appends each of the listed sources to the
+ * dependency/includes list of each of the listed targets. It binds both the
+ * targets and sources as TARGETs.
+ */
+
+LIST * builtin_depends( PARSE * parse, FRAME * frame )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * sources = lol_get( frame->args, 1 );
+ LIST * l;
+
+ for ( l = targets; l; l = list_next( l ) )
+ {
+ TARGET * t = bindtarget( l->string );
+
+ /* If doing INCLUDES, switch to the TARGET's include */
+ /* TARGET, creating it if needed. The internal include */
+ /* TARGET shares the name of its parent. */
+
+ if ( parse->num )
+ {
+ if ( !t->includes )
+ {
+ t->includes = copytarget( t );
+ t->includes->original_target = t;
+ }
+ t = t->includes;
+ }
+
+ t->depends = targetlist( t->depends, sources );
+ }
+
+ /* Enter reverse links */
+ for ( l = sources; l; l = list_next( l ) )
+ {
+ TARGET * s = bindtarget( l->string );
+ s->dependants = targetlist( s->dependants, targets );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_rebuilds() - REBUILDS rule.
+ *
+ * The REBUILDS builtin rule appends each of the listed rebuild-targets in its
+ * second argument to the rebuilds list of each of the listed targets in its
+ * first argument.
+ */
+
+LIST * builtin_rebuilds( PARSE * parse, FRAME * frame )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * rebuilds = lol_get( frame->args, 1 );
+ LIST * l;
+
+ for ( l = targets; l; l = list_next( l ) )
+ {
+ TARGET * t = bindtarget( l->string );
+ t->rebuilds = targetlist( t->rebuilds, rebuilds );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_echo() - ECHO rule.
+ *
+ * The ECHO builtin rule echoes the targets to the user. No other actions are
+ * taken.
+ */
+
+LIST * builtin_echo( PARSE * parse, FRAME * frame )
+{
+ list_print( lol_get( frame->args, 0 ) );
+ printf( "\n" );
+ fflush( stdout );
+ return L0;
+}
+
+
+/*
+ * builtin_exit() - EXIT rule.
+ *
+ * The EXIT builtin rule echoes the targets to the user and exits the program
+ * with a failure status.
+ */
+
+LIST * builtin_exit( PARSE * parse, FRAME * frame )
+{
+ list_print( lol_get( frame->args, 0 ) );
+ printf( "\n" );
+ if ( lol_get( frame->args, 1 ) )
+ {
+ exit( atoi( lol_get( frame->args, 1 )->string ) );
+ }
+ else
+ {
+ exit( EXITBAD ); /* yeech */
+ }
+ return L0;
+}
+
+
+/*
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule.
+ *
+ * Builtin_flags() marks the target with the appropriate flag, for use by make0().
+ * It binds each target as a TARGET.
+ */
+
+LIST * builtin_flags( PARSE * parse, FRAME * frame )
+{
+ LIST * l = lol_get( frame->args, 0 );
+ for ( ; l; l = list_next( l ) )
+ bindtarget( l->string )->flags |= parse->num;
+ return L0;
+}
+
+
+/*
+ * builtin_globbing() - GLOB rule.
+ */
+
+struct globbing
+{
+ LIST * patterns;
+ LIST * results;
+ LIST * case_insensitive;
+};
+
+
+static void downcase_inplace( char * p )
+{
+ for ( ; *p; ++p )
+ *p = tolower( *p );
+}
+
+
+static void builtin_glob_back
+(
+ void * closure,
+ char * file,
+ int status,
+ time_t time
+)
+{
+ PROFILE_ENTER( BUILTIN_GLOB_BACK );
+
+ struct globbing * globbing = (struct globbing *)closure;
+ LIST * l;
+ PATHNAME f;
+ string buf[ 1 ];
+
+ /* Null out directory for matching. We wish we had file_dirscan() pass up a
+ * PATHNAME.
+ */
+ path_parse( file, &f );
+ f.f_dir.len = 0;
+
+ /* For globbing, we unconditionally ignore current and parent directory
+     * items. Since these items always exist, there is no reason why a caller of
+ * GLOB would want to see them. We could also change file_dirscan(), but
+ * then paths with embedded "." and ".." would not work anywhere.
+ */
+ if ( !strcmp( f.f_base.ptr, "." ) || !strcmp( f.f_base.ptr, ".." ) )
+ {
+ PROFILE_EXIT( BUILTIN_GLOB_BACK );
+ return;
+ }
+
+ string_new( buf );
+ path_build( &f, buf, 0 );
+
+ if ( globbing->case_insensitive )
+ downcase_inplace( buf->value );
+
+ for ( l = globbing->patterns; l; l = l->next )
+ {
+ if ( !glob( l->string, buf->value ) )
+ {
+ globbing->results = list_new( globbing->results, newstr( file ) );
+ break;
+ }
+ }
+
+ string_free( buf );
+
+ PROFILE_EXIT( BUILTIN_GLOB_BACK );
+}
+
+
+static LIST * downcase_list( LIST * in )
+{
+ LIST * result = 0;
+
+ string s[ 1 ];
+ string_new( s );
+
+ while ( in )
+ {
+ string_copy( s, in->string );
+ downcase_inplace( s->value );
+ result = list_append( result, list_new( 0, newstr( s->value ) ) );
+ in = in->next;
+ }
+
+ string_free( s );
+ return result;
+}
+
+
+LIST * builtin_glob( PARSE * parse, FRAME * frame )
+{
+ LIST * l = lol_get( frame->args, 0 );
+ LIST * r = lol_get( frame->args, 1 );
+
+ struct globbing globbing;
+
+ globbing.results = L0;
+ globbing.patterns = r;
+
+ globbing.case_insensitive
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ = l; /* Always case-insensitive if any files can be found. */
+# else
+ = lol_get( frame->args, 2 );
+# endif
+
+ if ( globbing.case_insensitive )
+ globbing.patterns = downcase_list( r );
+
+ for ( ; l; l = list_next( l ) )
+ file_dirscan( l->string, builtin_glob_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ list_free( globbing.patterns );
+
+ return globbing.results;
+}
+
+
+static int has_wildcards( char const * str )
+{
+ size_t const index = strcspn( str, "[]*?" );
+ return str[ index ] == '\0' ? 0 : 1;
+}
+
+
+/*
+ * If 'file' exists, append 'file' to 'list'. Returns 'list'.
+ */
+
+static LIST * append_if_exists( LIST * list, char * file )
+{
+ time_t time;
+ timestamp( file, &time );
+ return time > 0
+ ? list_new( list, newstr( file ) )
+ : list;
+}
+
+
+LIST * glob1( char * dirname, char * pattern )
+{
+ LIST * plist = list_new( L0, pattern );
+ struct globbing globbing;
+
+ globbing.results = L0;
+ globbing.patterns = plist;
+
+ globbing.case_insensitive
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ = plist; /* always case-insensitive if any files can be found */
+# else
+ = L0;
+# endif
+
+ if ( globbing.case_insensitive )
+ globbing.patterns = downcase_list( plist );
+
+ file_dirscan( dirname, builtin_glob_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ list_free( globbing.patterns );
+
+ list_free( plist );
+
+ return globbing.results;
+}
+
+
+LIST * glob_recursive( char * pattern )
+{
+ LIST * result = L0;
+
+    /* Check if there are metacharacters in the pattern. */
+ if ( !has_wildcards( pattern ) )
+ {
+ /* No metacharacters. Check if the path exists. */
+ result = append_if_exists(result, pattern);
+ }
+ else
+ {
+ /* Have metacharacters in the pattern. Split into dir/name. */
+ PATHNAME path[ 1 ];
+ path_parse( pattern, path );
+
+ if ( path->f_dir.ptr )
+ {
+ LIST * dirs = L0;
+ string dirname[ 1 ];
+ string basename[ 1 ];
+ string_new( dirname );
+ string_new( basename );
+
+ string_append_range( dirname, path->f_dir.ptr,
+ path->f_dir.ptr + path->f_dir.len );
+
+ path->f_grist.ptr = 0;
+ path->f_grist.len = 0;
+ path->f_dir.ptr = 0;
+ path->f_dir.len = 0;
+ path_build( path, basename, 0 );
+
+ dirs = has_wildcards( dirname->value )
+ ? glob_recursive( dirname->value )
+ : list_new( dirs, dirname->value );
+
+ if ( has_wildcards( basename->value ) )
+ {
+ for ( ; dirs; dirs = dirs->next )
+ result = list_append( result, glob1( dirs->string,
+ basename->value ) );
+ }
+ else
+ {
+ string file_string[ 1 ];
+ string_new( file_string );
+
+ /* No wildcard in basename. */
+ for ( ; dirs; dirs = dirs->next )
+ {
+ path->f_dir.ptr = dirs->string;
+ path->f_dir.len = strlen( dirs->string );
+ path_build( path, file_string, 0 );
+
+ result = append_if_exists( result, file_string->value );
+
+ string_truncate( file_string, 0 );
+ }
+
+ string_free( file_string );
+ }
+
+ string_free( dirname );
+ string_free( basename );
+ }
+ else
+ {
+ /** No directory, just a pattern. */
+ result = list_append( result, glob1( ".", pattern ) );
+ }
+ }
+
+ return result;
+}
+
+
+LIST * builtin_glob_recursive( PARSE * parse, FRAME * frame )
+{
+ LIST * result = L0;
+ LIST * l = lol_get( frame->args, 0 );
+ for ( ; l; l = l->next )
+ result = list_append( result, glob_recursive( l->string ) );
+ return result;
+}
+
+
+/*
+ * builtin_match() - MATCH rule, regexp matching.
+ */
+
+LIST * builtin_match( PARSE * parse, FRAME * frame )
+{
+ LIST * l;
+ LIST * r;
+ LIST * result = 0;
+
+ string buf[ 1 ];
+ string_new( buf );
+
+ /* For each pattern */
+
+ for ( l = lol_get( frame->args, 0 ); l; l = l->next )
+ {
+ /* Result is cached and intentionally never freed. */
+ regexp * re = regex_compile( l->string );
+
+ /* For each string to match against. */
+ for ( r = lol_get( frame->args, 1 ); r; r = r->next )
+ {
+ if ( regexec( re, r->string ) )
+ {
+ int i;
+ int top;
+
+ /* Find highest parameter */
+
+ for ( top = NSUBEXP; top-- > 1; )
+ if ( re->startp[ top ] )
+ break;
+
+ /* And add all parameters up to highest onto list. */
+ /* Must have parameters to have results! */
+ for ( i = 1; i <= top; ++i )
+ {
+ string_append_range( buf, re->startp[ i ], re->endp[ i ] );
+ result = list_new( result, newstr( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ }
+
+ string_free( buf );
+ return result;
+}
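+
+/* An illustrative sketch, not used by Jam itself, of what the MATCH loop
+ * above computes for a single pattern/string pair. match_example() is a
+ * hypothetical helper; for the pattern "(.*)\.c" matched against "hash.c" it
+ * should return the one-element list { "hash" }, i.e. the text captured by
+ * the first parenthesised subexpression.
+ */
+static LIST * match_example( void )
+{
+    regexp * re = regex_compile( "(.*)\\.c" );
+    LIST * result = L0;
+    string buf[ 1 ];
+
+    string_new( buf );
+    if ( regexec( re, "hash.c" ) )
+    {
+        string_append_range( buf, re->startp[ 1 ], re->endp[ 1 ] );
+        result = list_new( result, newstr( buf->value ) );
+    }
+    string_free( buf );
+    return result;
+}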
+
+LIST * builtin_split_by_characters( PARSE * parse, FRAME * frame )
+{
+ LIST * l1 = lol_get( frame->args, 0 );
+ LIST * l2 = lol_get( frame->args, 1 );
+
+ LIST * result = 0;
+
+ char* s = strdup (l1->string);
+ char* delimiters = l2->string;
+ char* t;
+
+ t = strtok (s, delimiters);
+ while (t)
+ {
+ result = list_new(result, newstr(t));
+ t = strtok (NULL, delimiters);
+ }
+
+ free (s);
+
+ return result;
+}
+
+LIST * builtin_hdrmacro( PARSE * parse, FRAME * frame )
+{
+ LIST * l = lol_get( frame->args, 0 );
+
+ for ( ; l; l = list_next( l ) )
+ {
+ TARGET * t = bindtarget( l->string );
+
+ /* Scan file for header filename macro definitions. */
+ if ( DEBUG_HEADER )
+ printf( "scanning '%s' for header file macro definitions\n",
+ l->string );
+
+ macro_headers( t );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_rulenames() - RULENAMES ( MODULE ? ).
+ *
+ * Returns a list of the non-local rule names in the given MODULE. If MODULE is
+ * not supplied, returns the list of rule names in the global module.
+ */
+
+static void add_rule_name( void * r_, void * result_ )
+{
+ RULE * r = (RULE *)r_;
+ LIST * * result = (LIST * *)result_;
+ if ( r->exported )
+ *result = list_new( *result, copystr( r->name ) );
+}
+
+
+LIST * builtin_rulenames( PARSE * parse, FRAME * frame )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * source_module = bindmodule( arg0 ? arg0->string : 0 );
+
+ if ( source_module->rules )
+ hashenumerate( source_module->rules, add_rule_name, &result );
+ return result;
+}
+
+
+/*
+ * builtin_varnames() - VARNAMES ( MODULE ? ).
+ *
+ * Returns a list of the variable names in the given MODULE. If MODULE is not
+ * supplied, returns the list of variable names in the global module.
+ */
+
+/* Helper function for builtin_varnames() below. Used with hashenumerate(), it
+ * adds the key of each element to the result list.
+ */
+static void add_hash_key( void * np, void * result_ )
+{
+ LIST * * result = (LIST * *)result_;
+ *result = list_new( *result, copystr( *(char * *)np ) );
+}
+
+
+static struct hash * get_running_module_vars()
+{
+ struct hash * dummy;
+ struct hash * vars = NULL;
+ /* Get the global variables pointer (that of the currently running module).
+ */
+ var_hash_swap( &vars );
+ dummy = vars;
+ /* Put the global variables pointer in its right place. */
+ var_hash_swap( &dummy );
+ return vars;
+}
+
+
+LIST * builtin_varnames( PARSE * parse, FRAME * frame )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * source_module = bindmodule( arg0 ? arg0->string : 0 );
+
+ /* The running module _always_ has its 'variables' member set to NULL due to
+ * the way enter_module() and var_hash_swap() work.
+ */
+ struct hash * vars = source_module == frame->module
+ ? get_running_module_vars()
+ : source_module->variables;
+
+ if ( vars )
+ hashenumerate( vars, add_hash_key, &result );
+ return result;
+}
+
+
+/*
+ * builtin_delete_module() - DELETE_MODULE ( MODULE ? ).
+ *
+ * Clears all rules and variables from the given module.
+ */
+
+LIST * builtin_delete_module( PARSE * parse, FRAME * frame )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * source_module = bindmodule( arg0 ? arg0->string : 0 );
+ delete_module( source_module );
+ return result;
+}
+
+
+static void unknown_rule( FRAME * frame, char * key, char * module_name, char * rule_name )
+{
+ backtrace_line( frame->prev );
+ printf( "%s error: rule \"%s\" unknown in module \"%s\"\n", key, rule_name, module_name );
+ backtrace( frame->prev );
+ exit( 1 );
+}
+
+
+/*
+ * builtin_import() - IMPORT
+ * (
+ * SOURCE_MODULE ? :
+ * SOURCE_RULES * :
+ * TARGET_MODULE ? :
+ * TARGET_RULES * :
+ * LOCALIZE ?
+ * )
+ *
+ * The IMPORT rule imports rules from the SOURCE_MODULE into the TARGET_MODULE
+ * as local rules. If either SOURCE_MODULE or TARGET_MODULE is not supplied, it
+ * refers to the global module. SOURCE_RULES specifies which rules from the
+ * SOURCE_MODULE to import; TARGET_RULES specifies the names to give those rules
+ * in TARGET_MODULE. If SOURCE_RULES contains a name which doesn't correspond to
+ * a rule in SOURCE_MODULE, or if it contains a different number of items than
+ * TARGET_RULES, an error is issued. If LOCALIZE is specified, the rules will be
+ * executed in TARGET_MODULE, with corresponding access to its module local
+ * variables.
+ */
+
+LIST * builtin_import( PARSE * parse, FRAME * frame )
+{
+ LIST * source_module_list = lol_get( frame->args, 0 );
+ LIST * source_rules = lol_get( frame->args, 1 );
+ LIST * target_module_list = lol_get( frame->args, 2 );
+ LIST * target_rules = lol_get( frame->args, 3 );
+ LIST * localize = lol_get( frame->args, 4 );
+
+ module_t * target_module =
+ bindmodule( target_module_list ? target_module_list->string : 0 );
+ module_t * source_module =
+ bindmodule( source_module_list ? source_module_list->string : 0 );
+
+ LIST * source_name;
+ LIST * target_name;
+
+ for ( source_name = source_rules, target_name = target_rules;
+ source_name && target_name;
+ source_name = list_next( source_name ),
+ target_name = list_next( target_name ) )
+ {
+ RULE r_;
+ RULE * r = &r_;
+ RULE * imported;
+ r_.name = source_name->string;
+
+ if ( !source_module->rules ||
+ !hashcheck( source_module->rules, (HASHDATA * *)&r ) )
+ unknown_rule( frame, "IMPORT", source_module->name, r_.name );
+
+ imported = import_rule( r, target_module, target_name->string );
+ if ( localize )
+ imported->module = target_module;
+ /* This rule is really part of some other module. Just refer to it here,
+ * but do not let it out.
+ */
+ imported->exported = 0;
+ }
+
+ if ( source_name || target_name )
+ {
+ backtrace_line( frame->prev );
+ printf( "import error: length of source and target rule name lists don't match!\n" );
+ printf( " source: " );
+ list_print( source_rules );
+ printf( "\n target: " );
+ list_print( target_rules );
+ printf( "\n" );
+ backtrace( frame->prev );
+ exit( 1 );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_export() - EXPORT ( MODULE ? : RULES * ).
+ *
+ * The EXPORT rule marks RULES from MODULE as non-local (and thus
+ * exportable). If an element of RULES does not name a rule in MODULE, an error
+ * is issued.
+ */
+
+LIST * builtin_export( PARSE * parse, FRAME * frame )
+{
+ LIST * module_list = lol_get( frame->args, 0 );
+ LIST * rules = lol_get( frame->args, 1 );
+ module_t * m = bindmodule( module_list ? module_list->string : 0 );
+
+ for ( ; rules; rules = list_next( rules ) )
+ {
+ RULE r_;
+ RULE * r = &r_;
+ r_.name = rules->string;
+
+ if ( !m->rules || !hashcheck( m->rules, (HASHDATA * *)&r ) )
+ unknown_rule( frame, "EXPORT", m->name, r_.name );
+
+ r->exported = 1;
+ }
+ return L0;
+}
+
+
+/*
+ * get_source_line() - Retrieve the file and line number that should be
+ * indicated for a given procedure in debug output or an error backtrace.
+ */
+
+static void get_source_line( PARSE * procedure, char * * file, int * line )
+{
+ if ( procedure )
+ {
+ char * f = procedure->file;
+ int l = procedure->line;
+ if ( !strcmp( f, "+" ) )
+ {
+ f = "jambase.c";
+ l += 3;
+ }
+ *file = f;
+ *line = l;
+ }
+ else
+ {
+ *file = "(builtin)";
+ *line = -1;
+ }
+}
+
+
+void print_source_line( PARSE * p )
+{
+ char * file;
+ int line;
+
+ get_source_line( p, &file, &line );
+ if ( line < 0 )
+ printf( "(builtin):" );
+ else
+ printf( "%s:%d:", file, line );
+}
+
+
+/*
+ * backtrace_line() - print a single line of error backtrace for the given
+ * frame.
+ */
+
+void backtrace_line( FRAME * frame )
+{
+ if ( frame == 0 )
+ {
+ printf( "(no frame):" );
+ }
+ else
+ {
+ print_source_line( frame->procedure );
+ printf( " in %s\n", frame->rulename );
+ }
+}
+
+
+/*
+ * backtrace() - Print the entire backtrace from the given frame to the Jambase
+ * which invoked it.
+ */
+
+void backtrace( FRAME * frame )
+{
+ if ( !frame ) return;
+ while ( ( frame = frame->prev ) )
+ backtrace_line( frame );
+}
+
+
+/*
+ * builtin_backtrace() - A Jam version of the backtrace function, taking an
+ * optional number of levels and returning a list of quadruples: FILENAME LINE
+ * MODULE RULENAME describing each frame. Note that the module name is always
+ * followed by a period.
+ */
+
+LIST * builtin_backtrace( PARSE * parse, FRAME * frame )
+{
+ LIST * levels_arg = lol_get( frame->args, 0 );
+ int levels = levels_arg ? atoi( levels_arg->string ) : ( (unsigned int)(-1) >> 1 ) ;
+
+ LIST * result = L0;
+ for ( ; ( frame = frame->prev ) && levels ; --levels )
+ {
+ char * file;
+ int line;
+ char buf[32];
+ get_source_line( frame->procedure, &file, &line );
+ sprintf( buf, "%d", line );
+ result = list_new( result, newstr( file ) );
+ result = list_new( result, newstr( buf ) );
+ result = list_new( result, newstr( frame->module->name ) );
+ result = list_new( result, newstr( frame->rulename ) );
+ }
+ return result;
+}
+
+
+/*
+ * builtin_caller_module() - CALLER_MODULE ( levels ? )
+ *
+ * If levels is not supplied, returns the name of the module of the rule which
+ * called the one calling this one. If levels is supplied, it is interpreted as
+ * an integer specifying a number of additional levels of call stack to traverse
+ * in order to locate the module in question. If no such module exists, returns
+ * the empty list. Also returns the empty list when the module in question is
+ * the global module. This rule is needed for implementing module import
+ * behavior.
+ */
+
+LIST * builtin_caller_module( PARSE * parse, FRAME * frame )
+{
+ LIST * levels_arg = lol_get( frame->args, 0 );
+ int levels = levels_arg ? atoi( levels_arg->string ) : 0 ;
+
+ int i;
+ for ( i = 0; ( i < levels + 2 ) && frame->prev; ++i )
+ frame = frame->prev;
+
+ if ( frame->module == root_module() )
+ return L0;
+
+ {
+ LIST * result;
+ string name;
+ string_copy( &name, frame->module->name );
+ string_pop_back( &name );
+ result = list_new( L0, newstr(name.value) );
+ string_free( &name );
+ return result;
+ }
+}
+
+
+/*
+ * Return the current working directory.
+ *
+ * Usage: pwd = [ PWD ] ;
+ */
+
+LIST * builtin_pwd( PARSE * parse, FRAME * frame )
+{
+ return pwd();
+}
+
+
+/*
+ * Adds targets to the list of targets that jam will attempt to update.
+ */
+
+LIST * builtin_update( PARSE * parse, FRAME * frame )
+{
+ LIST * result = list_copy( L0, targets_to_update() );
+ LIST * arg1 = lol_get( frame->args, 0 );
+ clear_targets_to_update();
+ for ( ; arg1; arg1 = list_next( arg1 ) )
+ mark_target_for_updating( newstr( arg1->string ) );
+ return result;
+}
+
+extern int anyhow;
+int last_update_now_status;
+
+/* Takes a list of target names as its first argument and immediately
+   updates them.
+   The second parameter, if specified, is the descriptor (converted to a
+   string) of a log file to which all build output is redirected.
+   The third parameter, if non-empty, specifies that the -n option should have
+   no effect -- that is, all out-of-date targets should be rebuilt.
+*/
+LIST * builtin_update_now( PARSE * parse, FRAME * frame )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * log = lol_get( frame->args, 1 );
+ LIST * force = lol_get (frame->args, 2);
+ LIST * continue_ = lol_get(frame->args, 3);
+ int status = 0;
+ int original_stdout;
+ int original_stderr;
+ int n;
+ int targets_count;
+ const char** targets2;
+ int i;
+ int original_noexec;
+ int original_quitquick;
+
+
+ if (log)
+ {
+ int fd = atoi(log->string);
+        /* Redirect stdout and stderr, temporarily, to the log file. */
+        original_stdout = dup (1);
+        original_stderr = dup (2);
+        dup2 (fd, 1);
+        dup2 (fd, 2);
+ }
+
+ if (force)
+ {
+ original_noexec = globs.noexec;
+ globs.noexec = 0;
+ original_quitquick = globs.quitquick;
+ globs.quitquick = 0;
+ }
+
+ if (continue_)
+ {
+ original_quitquick = globs.quitquick;
+ globs.quitquick = 0;
+ }
+
+ targets_count = list_length( targets );
+ targets2 = (const char * *)BJAM_MALLOC( targets_count * sizeof( char * ) );
+ for (i = 0 ; targets; targets = list_next( targets ) )
+ targets2[ i++ ] = targets->string;
+ status |= make( targets_count, targets2, anyhow);
+    BJAM_FREE( targets2 );
+
+ if (force)
+ {
+ globs.noexec = original_noexec;
+ globs.quitquick = original_quitquick;
+ }
+
+ if (continue_)
+ {
+ globs.quitquick = original_quitquick;
+ }
+
+ if (log)
+ {
+        /* Flush whatever stdio might have buffered, while descriptors
+           1 and 2 still refer to the log file. */
+        fflush (stdout);
+        fflush (stderr);
+        dup2 (original_stdout, 1);
+        dup2 (original_stderr, 2);
+ close (original_stdout);
+ close (original_stderr);
+ }
+
+ last_update_now_status = status;
+
+ if (status == 0)
+ return list_new (L0, newstr ("ok"));
+ else
+ return L0;
+}
+
+LIST * builtin_search_for_target( PARSE * parse, FRAME * frame )
+{
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LIST * arg2 = lol_get( frame->args, 1 );
+ TARGET * t = search_for_target( arg1->string, arg2 );
+ return list_new( L0, t->name );
+}
+
+
+LIST * builtin_import_module( PARSE * parse, FRAME * frame )
+{
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LIST * arg2 = lol_get( frame->args, 1 );
+ module_t * m = arg2 ? bindmodule( arg2->string ) : root_module();
+ import_module( arg1, m );
+ return L0;
+}
+
+
+LIST * builtin_imported_modules( PARSE * parse, FRAME * frame )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ return imported_modules( bindmodule( arg0 ? arg0->string : 0 ) );
+}
+
+
+LIST * builtin_instance( PARSE * parse, FRAME * frame )
+{
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LIST * arg2 = lol_get( frame->args, 1 );
+ module_t * const instance = bindmodule( arg1->string );
+ module_t * const class_module = bindmodule( arg2->string );
+ instance->class_module = class_module;
+ return L0;
+}
+
+
+LIST * builtin_sort( PARSE * parse, FRAME * frame )
+{
+ LIST * arg1 = lol_get( frame->args, 0 );
+ return list_sort( arg1 );
+}
+
+
+LIST * builtin_normalize_path( PARSE * parse, FRAME * frame )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+
+    /* First, we iterate over all '/'-separated elements, starting from the end
+     * of the string. If we see a '..', we remove the preceding path element. If
+     * we see a '.', we remove it. The removal is done by overwriting the data
+     * with '\1' characters in the string. After the whole string has been
+     * processed, we do a second pass, removing all the inserted '\1' characters.
+ */
+
+ string in[ 1 ];
+ string out[ 1 ];
+ /* Last character of the part of string still to be processed. */
+ char * end;
+ /* Working pointer. */
+ char * current;
+ /* Number of '..' elements seen and not processed yet. */
+ int dotdots = 0;
+ int rooted = 0;
+ char * result = 0;
+
+ /* Make a copy of input: we should not change it. Prepend a '/' before it as
+ * a guard for the algorithm later on and remember whether it was originally
+ * rooted or not.
+ */
+ string_new( in );
+ string_push_back( in, '/' );
+ for ( ; arg; arg = list_next( arg ) )
+ {
+ if ( arg->string[ 0 ] != '\0' )
+ {
+ if ( in->size == 1 )
+ rooted = ( ( arg->string[ 0 ] == '/' ) ||
+ ( arg->string[ 0 ] == '\\' ) );
+ else
+ string_append( in, "/" );
+ string_append( in, arg->string );
+ }
+ }
+
+ /* Convert \ into /. On Windows, paths using / and \ are equivalent, and we
+     * want this function to produce a canonical representation.
+ */
+ for ( current = in->value, end = in->value + in->size;
+ current < end; ++current )
+ if ( *current == '\\' )
+ *current = '/';
+
+ /* Now we remove any extra path elements by overwriting them with '\1'
+     * characters and count how many more unused '..' path elements remain.
+     * Note that each remaining path element always starts with a '/'
+     * character.
+ */
+ for ( end = in->value + in->size - 1; end >= in->value; )
+ {
+        /* Set 'current' to the next occurrence of '/', which always exists. */
+ for ( current = end; *current != '/'; --current );
+
+ if ( current == end )
+ {
+ /* Found a trailing or duplicate '/'. Remove it. */
+ *current = '\1';
+ }
+ else if ( ( end - current == 1 ) && ( *(current + 1) == '.' ) )
+ {
+ /* Found '/.'. Remove them all. */
+ *current = '\1';
+ *(current + 1) = '\1';
+ }
+ else if ( ( end - current == 2 ) && ( *(current + 1) == '.' ) && ( *(current + 2) == '.' ) )
+ {
+ /* Found '/..'. Remove them all. */
+ *current = '\1';
+ *(current + 1) = '\1';
+ *(current + 2) = '\1';
+ ++dotdots;
+ }
+ else if ( dotdots )
+ {
+ memset( current, '\1', end - current + 1 );
+ --dotdots;
+ }
+ end = current - 1;
+ }
+
+ string_new( out );
+
+ /* Now we know that we need to add exactly dotdots '..' path elements to the
+ * front and that our string is either empty or has a '/' as its first
+     * significant character. If we have any dotdots remaining then the passed
+     * path must not have been rooted, or else it is invalid and we return an
+     * empty list.
+ */
+ if ( dotdots )
+ {
+ if ( rooted ) return L0;
+ do
+ string_append( out, "/.." );
+ while ( --dotdots );
+ }
+
+ /* Now we actually remove all the path characters marked for removal. */
+ for ( current = in->value; *current; ++current )
+ if ( *current != '\1' )
+ string_push_back( out, *current );
+
+ /* Here we know that our string contains no '\1' characters and is either
+ * empty or has a '/' as its initial character. If the original path was not
+ * rooted and we have a non-empty path we need to drop the initial '/'. If
+ * the original path was rooted and we have an empty path we need to add
+ * back the '/'.
+ */
+ result = newstr( out->size ? out->value + !rooted : ( rooted ? "/" : "." ) );
+
+ string_free( out );
+ string_free( in );
+
+ return list_new( 0, result );
+}
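+
+/* An illustrative sketch of the second pass of the mark-then-strip technique
+ * used by builtin_normalize_path() above, reduced to a plain buffer.
+ * strip_marked() is a hypothetical helper, not used by Jam itself: the first
+ * pass overwrites every character to be dropped with '\1', and this pass
+ * compacts the string by filtering those markers out in place.
+ */
+static void strip_marked( char * s )
+{
+    char * src = s;
+    char * dst = s;
+    for ( ; *src; ++src )
+        if ( *src != '\1' )
+            *dst++ = *src;
+    *dst = '\0';
+}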
+
+
+LIST * builtin_native_rule( PARSE * parse, FRAME * frame )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ LIST * rule_name = lol_get( frame->args, 1 );
+
+ module_t * module = bindmodule( module_name->string );
+
+ native_rule_t n;
+ native_rule_t * np = &n;
+ n.name = rule_name->string;
+ if ( module->native_rules && hashcheck( module->native_rules, (HASHDATA * *)&np ) )
+ {
+ new_rule_body( module, np->name, np->arguments, np->procedure, 1 );
+ }
+ else
+ {
+ backtrace_line( frame->prev );
+ printf( "error: no native rule \"%s\" defined in module \"%s\"\n",
+ n.name, module->name );
+ backtrace( frame->prev );
+ exit( 1 );
+ }
+ return L0;
+}
+
+
+LIST * builtin_has_native_rule( PARSE * parse, FRAME * frame )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ LIST * rule_name = lol_get( frame->args, 1 );
+ LIST * version = lol_get( frame->args, 2 );
+
+ module_t * module = bindmodule( module_name->string );
+
+ native_rule_t n;
+ native_rule_t * np = &n;
+ n.name = rule_name->string;
+ if ( module->native_rules && hashcheck( module->native_rules, (HASHDATA * *)&np ) )
+ {
+ int expected_version = atoi( version->string );
+ if ( np->version == expected_version )
+ return list_new( 0, newstr( "true" ) );
+ }
+ return L0;
+}
+
+
+LIST * builtin_user_module( PARSE * parse, FRAME * frame )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ for ( ; module_name; module_name = module_name->next )
+ {
+ module_t * m = bindmodule( module_name->string );
+ m->user_module = 1;
+ }
+ return L0;
+}
+
+
+LIST * builtin_nearest_user_location( PARSE * parse, FRAME * frame )
+{
+ FRAME * nearest_user_frame =
+ frame->module->user_module ? frame : frame->prev_user;
+ if ( !nearest_user_frame )
+ return L0;
+
+ {
+ LIST * result = 0;
+ char * file;
+ int line;
+ char buf[32];
+
+ get_source_line( nearest_user_frame->procedure, &file, &line );
+ sprintf( buf, "%d", line );
+ result = list_new( result, newstr( file ) );
+ result = list_new( result, newstr( buf ) );
+ return result;
+ }
+}
+
+
+LIST * builtin_check_if_file( PARSE * parse, FRAME * frame )
+{
+ LIST * name = lol_get( frame->args, 0 );
+ return file_is_file( name->string ) == 1
+ ? list_new( 0, newstr( "true" ) )
+ : L0 ;
+}
+
+
+LIST * builtin_md5( PARSE * parse, FRAME * frame )
+{
+ LIST * l = lol_get( frame->args, 0 );
+ char* s = l->string;
+
+ md5_state_t state;
+ md5_byte_t digest[16];
+ char hex_output[16*2 + 1];
+
+ int di;
+
+ md5_init(&state);
+ md5_append(&state, (const md5_byte_t *)s, strlen(s));
+ md5_finish(&state, digest);
+
+ for (di = 0; di < 16; ++di)
+ sprintf(hex_output + di * 2, "%02x", digest[di]);
+
+ return list_new (0, newstr(hex_output));
+}
+
+LIST *builtin_file_open( PARSE *parse, FRAME *frame )
+{
+ char* name = lol_get(frame->args, 0)->string;
+ char* mode = lol_get(frame->args, 1)->string;
+ int fd;
+ char buffer[sizeof("4294967295")];
+
+ if (strcmp(mode, "w") == 0)
+ {
+ fd = open(name, O_WRONLY|O_CREAT|O_TRUNC, 0666);
+ }
+ else
+ {
+ fd = open(name, O_RDONLY);
+ }
+
+ if (fd != -1)
+ {
+ sprintf(buffer, "%d", fd);
+ return list_new(L0, newstr(buffer));
+ }
+ else
+ {
+ return L0;
+ }
+}
+
+LIST *builtin_pad( PARSE *parse, FRAME *frame )
+{
+ char *string = lol_get(frame->args, 0)->string;
+ char *width_s = lol_get(frame->args, 1)->string;
+
+ int current = strlen (string);
+ int desired = atoi(width_s);
+ if (current >= desired)
+ return list_new (L0, string);
+ else
+ {
+ char *buffer = malloc (desired + 1);
+ int i;
+ LIST *result;
+
+ strcpy (buffer, string);
+ for (i = current; i < desired; ++i)
+ buffer[i] = ' ';
+ buffer[desired] = '\0';
+ result = list_new (L0, newstr (buffer));
+ free (buffer);
+ return result;
+ }
+}
+
+LIST *builtin_precious( PARSE *parse, FRAME *frame )
+{
+ LIST* targets = lol_get(frame->args, 0);
+
+ for ( ; targets; targets = list_next( targets ) )
+ {
+ TARGET* t = bindtarget (targets->string);
+ t->flags |= T_FLAG_PRECIOUS;
+ }
+
+ return L0;
+}
+
+LIST *builtin_self_path( PARSE *parse, FRAME *frame )
+{
+ extern char *saved_argv0;
+ char *p = executable_path (saved_argv0);
+ if (p)
+ {
+ LIST* result = list_new (0, newstr (p));
+ free(p);
+ return result;
+ }
+ else
+ {
+ return L0;
+ }
+}
+
+LIST *builtin_makedir( PARSE *parse, FRAME *frame )
+{
+ LIST *path = lol_get(frame->args, 0);
+
+ if (file_mkdir(path->string) == 0)
+ {
+ LIST *result = list_new (0, newstr(path->string));
+ return result;
+ }
+ else
+ {
+ return L0;
+ }
+}
+
+#ifdef HAVE_PYTHON
+
+LIST * builtin_python_import_rule( PARSE * parse, FRAME * frame )
+{
+ static int first_time = 1;
+ char * python_module = lol_get( frame->args, 0 )->string;
+ char * python_function = lol_get( frame->args, 1 )->string;
+ char * jam_module = lol_get( frame->args, 2 )->string;
+ char * jam_rule = lol_get( frame->args, 3 )->string;
+
+ PyObject * pName;
+ PyObject * pModule;
+ PyObject * pDict;
+ PyObject * pFunc;
+
+ if ( first_time )
+ {
+ /* At the first invocation, we add the value of the global
+ * EXTRA_PYTHONPATH to the sys.path Python variable.
+ */
+ LIST * extra = 0;
+ module_t * outer_module = frame->module;
+
+ first_time = 0;
+
+ if ( outer_module != root_module() )
+ {
+ exit_module( outer_module );
+ enter_module( root_module() );
+ }
+
+ extra = var_get( "EXTRA_PYTHONPATH" );
+
+ if ( outer_module != root_module() )
+ {
+ exit_module( root_module() );
+ enter_module( outer_module );
+ }
+
+ for ( ; extra; extra = extra->next )
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append( buf, "import sys\nsys.path.append(\"" );
+ string_append( buf, extra->string );
+ string_append( buf, "\")\n" );
+ PyRun_SimpleString( buf->value );
+ string_free( buf );
+ }
+ }
+
+ pName = PyString_FromString( python_module );
+ pModule = PyImport_Import( pName );
+ Py_DECREF( pName );
+
+ if ( pModule != NULL )
+ {
+ pDict = PyModule_GetDict( pModule );
+ pFunc = PyDict_GetItemString( pDict, python_function );
+
+ if ( pFunc && PyCallable_Check( pFunc ) )
+ {
+ module_t * m = bindmodule( jam_module );
+ RULE * r = bindrule( jam_rule, m );
+
+ /* Make pFunc owned. */
+ Py_INCREF( pFunc );
+
+ r->python_function = pFunc;
+ }
+ else
+ {
+ if ( PyErr_Occurred() )
+ PyErr_Print();
+ fprintf( stderr, "Cannot find function \"%s\"\n", python_function );
+ }
+ Py_DECREF( pModule );
+ }
+ else
+ {
+ PyErr_Print();
+ fprintf( stderr, "Failed to load \"%s\"\n", python_module );
+ }
+ return L0;
+
+}
+
+#endif
+
+void lol_build( LOL * lol, char * * elements )
+{
+ LIST * l = L0;
+ lol_init( lol );
+
+ while ( elements && *elements )
+ {
+ if ( !strcmp( *elements, ":" ) )
+ {
+ lol_add( lol, l );
+ l = L0 ;
+ }
+ else
+ {
+ l = list_new( l, newstr( *elements ) );
+ }
+ ++elements;
+ }
+
+ if ( l != L0 )
+ lol_add( lol, l );
+}
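+
+/* An illustrative sketch, not used by Jam itself, of what lol_build() above
+ * produces for the argument spec the EXIT rule is registered with earlier in
+ * this file. lol_build_example() is a hypothetical helper; the ":" separator
+ * splits the flat array into two LISTs inside the LOL, namely
+ * { "message", "*" } and { "result-value", "?" }.
+ */
+static void lol_build_example( LOL * lol )
+{
+    char * spec[] = { "message", "*", ":", "result-value", "?", 0 };
+    lol_build( lol, spec );
+}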
+
+
+#ifdef HAVE_PYTHON
+
+/*
+ * Calls the bjam rule specified by the name passed in 'args'. The name is
+ * looked up in the context of bjam's 'python_interface' module. Returns the
+ * list of strings returned by the rule.
+ */
+
+PyObject* bjam_call( PyObject * self, PyObject * args )
+{
+ FRAME inner[ 1 ];
+ LIST * result;
+ PARSE * p;
+ char * rulename;
+
+ /* Build up the list of arg lists. */
+ frame_init( inner );
+ inner->prev = 0;
+ inner->prev_user = 0;
+ inner->module = bindmodule( "python_interface" );
+ inner->procedure = 0;
+
+ /* Extract the rule name and arguments from 'args'. */
+
+ /* PyTuple_GetItem returns borrowed reference. */
+ rulename = PyString_AsString( PyTuple_GetItem( args, 0 ) );
+ {
+ int i = 1;
+ int size = PyTuple_Size( args );
+ for ( ; i < size; ++i )
+ {
+ PyObject * a = PyTuple_GetItem( args, i );
+ if ( PyString_Check( a ) )
+ {
+ lol_add( inner->args, list_new( 0, newstr(
+ PyString_AsString( a ) ) ) );
+ }
+ else if ( PySequence_Check( a ) )
+ {
+ LIST * l = 0;
+ int s = PySequence_Size( a );
+ int i = 0;
+ for ( ; i < s; ++i )
+ {
+ /* PySequence_GetItem returns new reference. */
+ PyObject * e = PySequence_GetItem( a, i );
+ char * s = PyString_AsString( e );
+ if ( !s )
+ {
+ printf( "Invalid parameter type passed from Python\n" );
+ exit( 1 );
+ }
+ l = list_new( l, newstr( s ) );
+ Py_DECREF( e );
+ }
+ lol_add( inner->args, l );
+ }
+ }
+ }
+
+ result = evaluate_rule( rulename, inner );
+
+ frame_free( inner );
+
+ /* Convert the bjam list into a Python list result. */
+ {
+ PyObject * pyResult = PyList_New( list_length( result ) );
+ int i = 0;
+ while ( result )
+ {
+ PyList_SetItem( pyResult, i, PyString_FromString( result->string ) );
+ result = list_next( result );
+ i += 1;
+ }
+ list_free( result );
+ return pyResult;
+ }
+}
+
+
+/*
+ * Accepts four arguments:
+ * - module name
+ * - rule name
+ * - Python callable
+ * - (optional) bjam language function signature
+ * Creates a bjam rule with the specified name in the specified module, which will
+ * invoke the Python callable.
+ */
+
+PyObject * bjam_import_rule( PyObject * self, PyObject * args )
+{
+ char * module;
+ char * rule;
+ PyObject * func;
+ PyObject * bjam_signature = NULL;
+ module_t * m;
+ RULE * r;
+
+ if ( !PyArg_ParseTuple( args, "ssO|O:import_rule",
+ &module, &rule, &func, &bjam_signature ) )
+ return NULL;
+
+ if ( !PyCallable_Check( func ) )
+ {
+ PyErr_SetString( PyExc_RuntimeError,
+ "Non-callable object passed to bjam.import_rule" );
+ return NULL;
+ }
+
+ m = bindmodule( *module ? module : 0 );
+ r = bindrule( rule, m );
+
+ /* Make pFunc owned. */
+ Py_INCREF( func );
+
+ r->python_function = func;
+ r->arguments = 0;
+
+ if (bjam_signature)
+ {
+ argument_list * arg_list = args_new();
+ Py_ssize_t i;
+
+ Py_ssize_t s = PySequence_Size (bjam_signature);
+ for (i = 0; i < s; ++i)
+ {
+ PyObject* v = PySequence_GetItem (bjam_signature, i);
+ lol_add(arg_list->data, list_from_python (v));
+ Py_DECREF(v);
+ }
+ r->arguments = arg_list;
+ }
+
+ Py_INCREF( Py_None );
+ return Py_None;
+}
+
+
+/*
+ * Accepts four arguments:
+ * - an action name
+ * - an action body
+ * - a list of variables that will be bound inside the action
+ * - integer flags
+ * Defines an action on the bjam side.
+ */
+
+PyObject * bjam_define_action( PyObject * self, PyObject * args )
+{
+ char * name;
+ char * body;
+ module_t * m;
+ PyObject * bindlist_python;
+ int flags;
+ LIST * bindlist = L0;
+ int n;
+ int i;
+
+ if ( !PyArg_ParseTuple( args, "ssO!i:define_action", &name, &body,
+ &PyList_Type, &bindlist_python, &flags ) )
+ return NULL;
+
+ n = PyList_Size( bindlist_python );
+ for ( i = 0; i < n; ++i )
+ {
+ PyObject * next = PyList_GetItem( bindlist_python, i );
+ if ( !PyString_Check( next ) )
+ {
+ PyErr_SetString( PyExc_RuntimeError,
+ "bind list has non-string type" );
+ return NULL;
+ }
+ bindlist = list_new( bindlist, PyString_AsString( next ) );
+ }
+
+ new_rule_actions( root_module(), name, newstr( body ), bindlist, flags );
+
+ Py_INCREF( Py_None );
+ return Py_None;
+}
+
+
+/*
+ * Returns the value of a variable in the root Jam module.
+ */
+
+PyObject * bjam_variable( PyObject * self, PyObject * args )
+{
+ char * name;
+ LIST * value;
+ PyObject * result;
+ int i;
+
+ if ( !PyArg_ParseTuple( args, "s", &name ) )
+ return NULL;
+
+ enter_module( root_module() );
+ value = var_get( name );
+ exit_module( root_module() );
+
+ result = PyList_New( list_length( value ) );
+ for ( i = 0; value; value = list_next( value ), ++i )
+ PyList_SetItem( result, i, PyString_FromString( value->string ) );
+
+ return result;
+}
+
+
+PyObject * bjam_backtrace( PyObject * self, PyObject * args )
+{
+ PyObject * result = PyList_New( 0 );
+ struct frame * f = frame_before_python_call;
+
+    for ( ; ( f = f->prev ); )
+ {
+ PyObject * tuple = PyTuple_New( 4 );
+ char * file;
+ int line;
+ char buf[ 32 ];
+
+ get_source_line( f->procedure, &file, &line );
+ sprintf( buf, "%d", line );
+
+ /* PyTuple_SetItem steals reference. */
+ PyTuple_SetItem( tuple, 0, PyString_FromString( file ) );
+ PyTuple_SetItem( tuple, 1, PyString_FromString( buf ) );
+ PyTuple_SetItem( tuple, 2, PyString_FromString( f->module->name ) );
+ PyTuple_SetItem( tuple, 3, PyString_FromString( f->rulename ) );
+
+ PyList_Append( result, tuple );
+ Py_DECREF( tuple );
+ }
+ return result;
+}
+
+PyObject * bjam_caller( PyObject * self, PyObject * args )
+{
+ PyObject *result = PyString_FromString(
+ frame_before_python_call->prev->module->name);
+ return result;
+}
+
+#endif /* #ifdef HAVE_PYTHON */
+
+
+#ifdef HAVE_POPEN
+
+#if defined(_MSC_VER) || defined(__BORLANDC__)
+ #define popen windows_popen_wrapper
+ #define pclose _pclose
+
+ /*
+ * This wrapper is a workaround for a funny _popen() feature on Windows
+ * where it eats external quotes in some cases. The bug seems to be related
+ * to the quote stripping functionality used by the Windows cmd.exe
+ * interpreter when its /S is not specified.
+ *
+ * Cleaned up quote from the cmd.exe help screen as displayed on Windows XP
+ * SP3:
+ *
+ * 1. If all of the following conditions are met, then quote characters on
+ * the command line are preserved:
+ *
+ * - no /S switch
+ * - exactly two quote characters
+ * - no special characters between the two quote characters, where
+ * special is one of: &<>()@^|
+ * - there are one or more whitespace characters between the two quote
+ * characters
+ * - the string between the two quote characters is the name of an
+ * executable file.
+ *
+ * 2. Otherwise, old behavior is to see if the first character is a quote
+ * character and if so, strip the leading character and remove the last
+ * quote character on the command line, preserving any text after the
+ * last quote character.
+ *
+ * This causes some commands containing quotes not to be executed correctly.
+ * For example:
+ *
+ * "\Long folder name\aaa.exe" --name="Jurko" --no-surname
+ *
+ * would get its outermost quotes stripped and would be executed as:
+ *
+ * \Long folder name\aaa.exe" --name="Jurko --no-surname
+ *
+ * which would report an error about '\Long' not being a valid command.
+ *
+ * cmd.exe help seems to indicate it would be enough to add an extra space
+ * character in front of the command to avoid this but this does not work,
+ * most likely due to the shell first stripping all leading whitespace
+ * characters from the command.
+ *
+     * The solution implemented here is to quote the whole command in case it
+     * contains any quote characters. Note though that this will not work
+     * correctly should Windows ever 'fix' this feature.
+ * (03.06.2008.) (Jurko)
+ */
+ static FILE * windows_popen_wrapper( char * command, char * mode )
+ {
+ int extra_command_quotes_needed = ( strchr( command, '"' ) != 0 );
+ string quoted_command;
+ FILE * result;
+
+ if ( extra_command_quotes_needed )
+ {
+ string_new( &quoted_command );
+ string_append( &quoted_command, "\"" );
+ string_append( &quoted_command, command );
+ string_append( &quoted_command, "\"" );
+ command = quoted_command.value;
+ }
+
+ result = _popen( command, "r" );
+
+ if ( extra_command_quotes_needed )
+ string_free( &quoted_command );
+
+ return result;
+ }
+#endif
+
+
+static char * rtrim(char *s)
+{
+ char *p = s;
+ while(*p) ++p;
+ for(--p; p >= s && isspace(*p); *p-- = 0);
+ return s;
+}
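+
+/* An illustrative sketch, not used by Jam itself: rtrim() above strips
+ * trailing whitespace in place, and builtin_shell() below applies it to each
+ * buffered chunk when SHELL is invoked with the "strip-eol" option.
+ * rtrim_example() is a hypothetical helper shown only for illustration.
+ */
+static void rtrim_example( void )
+{
+    char line[] = "captured output\r\n";
+    rtrim( line );   /* line now reads "captured output" */
+}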
+
+LIST * builtin_shell( PARSE * parse, FRAME * frame )
+{
+ LIST * command = lol_get( frame->args, 0 );
+ LIST * result = 0;
+ string s;
+ int ret;
+ char buffer[ 1024 ];
+ FILE * p = NULL;
+ int exit_status = -1;
+ int exit_status_opt = 0;
+ int no_output_opt = 0;
+ int strip_eol_opt = 0;
+
+ /* Process the variable args options. */
+ {
+ int a = 1;
+ LIST * arg = lol_get( frame->args, a );
+ while ( arg )
+ {
+ if ( strcmp( "exit-status", arg->string ) == 0 )
+ {
+ exit_status_opt = 1;
+ }
+ else if ( strcmp( "no-output", arg->string ) == 0 )
+ {
+ no_output_opt = 1;
+ }
+ else if ( strcmp("strip-eol", arg->string) == 0 )
+ {
+ strip_eol_opt = 1;
+ }
+ arg = lol_get( frame->args, ++a );
+ }
+ }
+
+ /* The following fflush() call seems to be indicated as a workaround for a
+     * popen() bug on POSIX implementations related to synchronizing input
+ * stream positions for the called and the calling process.
+ */
+ fflush( NULL );
+
+ p = popen( command->string, "r" );
+ if ( p == NULL )
+ return L0;
+
+ string_new( &s );
+
+ while ( ( ret = fread( buffer, sizeof( char ), sizeof( buffer ) - 1, p ) ) > 0 )
+ {
+ buffer[ret] = 0;
+ if ( !no_output_opt )
+ {
+ if ( strip_eol_opt )
+ rtrim(buffer);
+ string_append( &s, buffer );
+ }
+ }
+
+ exit_status = pclose( p );
+
+ /* The command output is returned first. */
+ result = list_new( L0, newstr( s.value ) );
+ string_free( &s );
+
+    /* The command exit status is returned next. */
+ if ( exit_status_opt )
+ {
+ if ( WIFEXITED(exit_status) )
+ exit_status = WEXITSTATUS(exit_status);
+ else
+ exit_status = -1;
+ sprintf( buffer, "%d", exit_status );
+ result = list_new( result, newstr( buffer ) );
+ }
+
+ return result;
+}
+
+#else /* #ifdef HAVE_POPEN */
+
+LIST * builtin_shell( PARSE * parse, FRAME * frame )
+{
+ return L0;
+}
+
+#endif /* #ifdef HAVE_POPEN */
diff --git a/jam-files/engine/builtins.h b/jam-files/engine/builtins.h
new file mode 100644
index 000000000..5fed07c96
--- /dev/null
+++ b/jam-files/engine/builtins.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#ifndef JAM_BUILTINS_H
+# define JAM_BUILTINS_H
+
+# include "frames.h"
+
+/*
+ * builtins.h - compile parsed jam statements
+ */
+
+void load_builtins();
+void init_set();
+void init_path();
+void init_regex();
+void init_property_set();
+void init_sequence();
+void init_order();
+
+LIST *builtin_calc( PARSE *parse, FRAME *args );
+LIST *builtin_depends( PARSE *parse, FRAME *args );
+LIST *builtin_rebuilds( PARSE *parse, FRAME *args );
+LIST *builtin_echo( PARSE *parse, FRAME *args );
+LIST *builtin_exit( PARSE *parse, FRAME *args );
+LIST *builtin_flags( PARSE *parse, FRAME *args );
+LIST *builtin_glob( PARSE *parse, FRAME *args );
+LIST *builtin_glob_recursive( PARSE *parse, FRAME *frame );
+LIST *builtin_subst( PARSE *parse, FRAME *args );
+LIST *builtin_match( PARSE *parse, FRAME *args );
+LIST *builtin_split_by_characters( PARSE *parse, FRAME *args );
+LIST *builtin_hdrmacro( PARSE *parse, FRAME *args );
+LIST *builtin_rulenames( PARSE *parse, FRAME *args );
+LIST *builtin_varnames( PARSE *parse, FRAME *args );
+LIST *builtin_delete_module( PARSE *parse, FRAME *args );
+LIST *builtin_import( PARSE *parse, FRAME *args );
+LIST *builtin_export( PARSE *parse, FRAME *args );
+LIST *builtin_caller_module( PARSE *parse, FRAME *args );
+LIST *builtin_backtrace( PARSE *parse, FRAME *args );
+LIST *builtin_pwd( PARSE *parse, FRAME *args );
+LIST *builtin_update( PARSE *parse, FRAME *args );
+LIST *builtin_update_now( PARSE *parse, FRAME *args );
+LIST *builtin_search_for_target( PARSE *parse, FRAME *args );
+LIST *builtin_import_module( PARSE *parse, FRAME *args );
+LIST *builtin_imported_modules( PARSE *parse, FRAME *frame );
+LIST *builtin_instance( PARSE *parse, FRAME *frame );
+LIST *builtin_sort( PARSE *parse, FRAME *frame );
+LIST *builtin_normalize_path( PARSE *parse, FRAME *frame );
+LIST *builtin_native_rule( PARSE *parse, FRAME *frame );
+LIST *builtin_has_native_rule( PARSE *parse, FRAME *frame );
+LIST *builtin_user_module( PARSE *parse, FRAME *frame );
+LIST *builtin_nearest_user_location( PARSE *parse, FRAME *frame );
+LIST *builtin_check_if_file( PARSE *parse, FRAME *frame );
+LIST *builtin_python_import_rule( PARSE *parse, FRAME *frame );
+LIST *builtin_shell( PARSE *parse, FRAME *frame );
+LIST *builtin_md5( PARSE *parse, FRAME *frame );
+LIST *builtin_file_open( PARSE *parse, FRAME *frame );
+LIST *builtin_pad( PARSE *parse, FRAME *frame );
+LIST *builtin_precious( PARSE *parse, FRAME *frame );
+LIST *builtin_self_path( PARSE *parse, FRAME *frame );
+LIST *builtin_makedir( PARSE *parse, FRAME *frame );
+
+void backtrace( FRAME *frame );
+extern int last_update_now_status;
+
+#endif
diff --git a/jam-files/engine/bump_version.py b/jam-files/engine/bump_version.py
new file mode 100644
index 000000000..9423c4c77
--- /dev/null
+++ b/jam-files/engine/bump_version.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+# This script is used to bump the version of bjam. It takes a single argument, e.g.
+#
+# ./bump_version.py 3.1.9
+#
+# and updates all necessary files. For the time being, it assumes the presence
+# of a 'perl' executable and the Debian-specific 'dch' executable.
+#
+
+
+import os
+import os.path
+import re
+import string
+import sys
+
+srcdir = os.path.abspath(os.path.dirname(__file__ ))
+docdir = os.path.abspath(os.path.join(srcdir,"..","doc"))
+
+def edit(file,replacements):
+ print " '%s'..." %(file)
+ text = open(file,'r').read()
+ while len(replacements) > 0:
+ #~ print " '%s' ==> '%s'" % (replacements[0],replacements[1])
+ text = re.compile(replacements[0],re.M).subn(replacements[1],text)[0]
+ replacements = replacements[2:]
+ #~ print text
+ open(file,'w').write(text)
+
+def make_edits(version):
+ edit(os.path.join(srcdir,"boost-jam.spec"), [
+ '^Version:.*$','Version: %s' % string.join(version, "."),
+ ])
+
+ edit(os.path.join(srcdir,"build.jam"), [
+ '^_VERSION_ = .* ;$','_VERSION_ = %s %s %s ;' % (version[0], version[1], version[2]),
+ ])
+
+ edit(os.path.join(docdir,"bjam.qbk"), [
+ '\[version.*\]','[version: %s]' % string.join(version, '.'),
+ '\[def :version:.*\]','[def :version: %s]' % string.join(version, '.'),
+ ])
+
+ edit(os.path.join(srcdir,"patchlevel.h"), [
+ '^#define VERSION_MAJOR .*$',
+ '#define VERSION_MAJOR %s' % (version[0]),
+ '^#define VERSION_MINOR .*$',
+ '#define VERSION_MINOR %s' % (version[1]),
+ '^#define VERSION_PATCH .*$',
+ '#define VERSION_PATCH %s' % (version[2]),
+ '^#define VERSION_MAJOR_SYM .*$',
+ '#define VERSION_MAJOR_SYM "0%s"' % (version[0]),
+ '^#define VERSION_MINOR_SYM .*$',
+ '#define VERSION_MINOR_SYM "%s"' % (version[1]),
+ '^#define VERSION_PATCH_SYM .*$',
+ '#define VERSION_PATCH_SYM "%s"' % (version[2]),
+ '^#define VERSION .*$',
+ '#define VERSION "%s"' % string.join(version, '.'),
+ '^#define JAMVERSYM .*$',
+ '#define JAMVERSYM "JAMVERSION=%s.%s"' % (version[0],version[1]),
+ ])
+
+def main():
+
+ if len(sys.argv) < 2:
+ print "Expect new version as argument"
+ sys.exit(1)
+
+ version = string.split(sys.argv[1], ".")
+ print "Setting version to", version
+ make_edits(version)
+
+if __name__ == '__main__':
+ main()
+
+#~ Copyright 2006 Rene Rivera.
+#~ Copyright 2005-2006 Vladimir Prus.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
diff --git a/jam-files/engine/class.c b/jam-files/engine/class.c
new file mode 100644
index 000000000..ff4ec5680
--- /dev/null
+++ b/jam-files/engine/class.c
@@ -0,0 +1,141 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "class.h"
+#include "strings.h"
+#include "variable.h"
+#include "frames.h"
+#include "rules.h"
+#include "newstr.h"
+
+#include "hash.h"
+
+
+static struct hash * classes = 0;
+
+
+static void check_defined( LIST * class_names )
+{
+ for ( ; class_names; class_names = class_names->next )
+ {
+ char * * p = &class_names->string;
+ if ( !hashcheck( classes, (HASHDATA * *)&p ) )
+ {
+ printf( "Class %s is not defined\n", class_names->string );
+ abort();
+ }
+ }
+}
+
+
+static char * class_module_name( char * declared_name )
+{
+ string name[ 1 ];
+ char * result;
+
+ string_new( name );
+ string_append( name, "class@" );
+ string_append( name, declared_name );
+
+ result = newstr( name->value );
+ string_free( name );
+
+ return result;
+}
+
+
+struct import_base_data
+{
+ char * base_name;
+ module_t * base_module;
+ module_t * class_module;
+};
+
+
+static void import_base_rule( void * r_, void * d_ )
+{
+ RULE * r = (RULE *)r_;
+ RULE * ir1;
+ RULE * ir2;
+ struct import_base_data * d = (struct import_base_data *)d_;
+ string qualified_name[ 1 ];
+
+ string_new ( qualified_name );
+ string_append ( qualified_name, d->base_name );
+ string_push_back( qualified_name, '.' );
+ string_append ( qualified_name, r->name );
+
+ ir1 = import_rule( r, d->class_module, r->name );
+ ir2 = import_rule( r, d->class_module, qualified_name->value );
+
+ /* Copy 'exported' flag. */
+ ir1->exported = ir2->exported = r->exported;
+
+ /* If we are importing a class method, localize it. */
+ if ( ( r->module == d->base_module ) || ( r->module->class_module &&
+ ( r->module->class_module == d->base_module ) ) )
+ ir1->module = ir2->module = d->class_module;
+
+ string_free( qualified_name );
+}
+
+
+/*
+ * For each exported rule 'n', declared in class module for base, imports that
+ * rule in 'class' as 'n' and as 'base.n'. Imported rules are localized and
+ * marked as exported.
+ */
+
+static void import_base_rules( module_t * class, char * base )
+{
+ module_t * base_module = bindmodule( class_module_name( base ) );
+ struct import_base_data d;
+ d.base_name = base;
+ d.base_module = base_module;
+ d.class_module = class;
+
+ if ( base_module->rules )
+ hashenumerate( base_module->rules, import_base_rule, &d );
+
+ import_module( imported_modules( base_module ), class );
+}
+
+
+char * make_class_module( LIST * xname, LIST * bases, FRAME * frame )
+{
+ char * name = class_module_name( xname->string );
+ char * * pp = &xname->string;
+ module_t * class_module = 0;
+ module_t * outer_module = frame->module;
+
+ if ( !classes )
+ classes = hashinit( sizeof( char * ), "classes" );
+
+ if ( hashcheck( classes, (HASHDATA * *)&pp ) )
+ {
+ printf( "Class %s already defined\n", xname->string );
+ abort();
+ }
+ else
+ {
+ hashenter( classes, (HASHDATA * *)&pp );
+ }
+ check_defined( bases );
+
+ class_module = bindmodule( name );
+
+ exit_module( outer_module );
+ enter_module( class_module );
+
+ var_set( "__name__", xname, VAR_SET );
+ var_set( "__bases__", bases, VAR_SET );
+
+ exit_module( class_module );
+ enter_module( outer_module );
+
+ for ( ; bases; bases = bases->next )
+ import_base_rules( class_module, bases->string );
+
+ return name;
+}
diff --git a/jam-files/engine/class.h b/jam-files/engine/class.h
new file mode 100644
index 000000000..f7faeff62
--- /dev/null
+++ b/jam-files/engine/class.h
@@ -0,0 +1,13 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#ifndef CLASS_H_VP_2003_08_01
+#define CLASS_H_VP_2003_08_01
+
+#include "lists.h"
+#include "frames.h"
+
+char* make_class_module(LIST* xname, LIST* bases, FRAME* frame);
+
+#endif
diff --git a/jam-files/engine/command.c b/jam-files/engine/command.c
new file mode 100644
index 000000000..d2ea06814
--- /dev/null
+++ b/jam-files/engine/command.c
@@ -0,0 +1,100 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * command.c - maintain lists of commands
+ */
+
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "variable.h"
+#include "rules.h"
+
+#include "command.h"
+#include <limits.h>
+#include <string.h>
+
+
+/*
+ * cmd_new() - return a new CMD or 0 if too many args
+ */
+
+CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell )
+{
+ CMD * cmd = (CMD *)BJAM_MALLOC( sizeof( CMD ) );
+ /* Lift line-length limitation entirely when JAMSHELL is just "%". */
+ int no_limit = ( shell && !strcmp(shell->string,"%") && !list_next(shell) );
+ int max_line = MAXLINE;
+ int allocated = -1;
+
+ cmd->rule = rule;
+ cmd->shell = shell;
+ cmd->next = 0;
+
+ lol_init( &cmd->args );
+ lol_add( &cmd->args, targets );
+ lol_add( &cmd->args, sources );
+ cmd->buf = 0;
+
+ do
+ {
+ BJAM_FREE( cmd->buf ); /* free any buffer from previous iteration */
+
+ cmd->buf = (char*)BJAM_MALLOC_ATOMIC( max_line + 1 );
+
+ if ( cmd->buf == 0 )
+ break;
+
+ allocated = var_string( rule->actions->command, cmd->buf, max_line, &cmd->args );
+
+ max_line = max_line * 2;
+ }
+ while ( ( allocated < 0 ) && ( max_line < INT_MAX / 2 ) );
+
+ if ( !no_limit )
+ {
+ /* Bail if the result will not fit in MAXLINE. */
+ char * s = cmd->buf;
+ while ( *s )
+ {
+ size_t l = strcspn( s, "\n" );
+
+ if ( l > MAXLINE )
+ {
+ /* We do not free targets/sources/shell if bailing. */
+ cmd_free( cmd );
+ return 0;
+ }
+
+ s += l;
+ if ( *s )
+ ++s;
+ }
+ }
+
+ return cmd;
+}
+
+
+/*
+ * cmd_free() - free a CMD
+ */
+
+void cmd_free( CMD * cmd )
+{
+ lol_free( &cmd->args );
+ list_free( cmd->shell );
+ BJAM_FREE( cmd->buf );
+ BJAM_FREE( (char *)cmd );
+}
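cmd_new() above sizes cmd->buf by retrying: it formats the action into a max_line-byte buffer and, whenever var_string() signals overflow with a negative result, frees the buffer and doubles the limit until the text fits or the limit reaches INT_MAX / 2. A standalone sketch of that grow-and-retry pattern, with snprintf() standing in for var_string() (the command text is illustrative only):

/* Standalone sketch, not part of the patch: the grow-and-retry buffer sizing
 * used by cmd_new(), with snprintf() standing in for var_string(). */
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

static char * format_grow( const char * text )
{
    int max_line = 32;          /* small starting size to force a retry */
    char * buf = NULL;
    int needed;

    do
    {
        free( buf );            /* drop the too-small buffer from last round */
        buf = malloc( max_line + 1 );
        if ( buf == NULL )
            return NULL;

        /* snprintf() reports the length it wanted; treat overflow as failure,
         * just as cmd_new() treats a negative var_string() result. */
        needed = snprintf( buf, max_line + 1, "echo %s", text );
        if ( needed >= 0 && needed <= max_line )
            return buf;         /* it fit */

        max_line = max_line * 2;
    }
    while ( max_line < INT_MAX / 2 );

    free( buf );
    return NULL;
}

int main( void )
{
    char * s = format_grow( "a fairly long command line that will not fit in 32 bytes" );
    if ( s )
    {
        puts( s );
        free( s );
    }
    return 0;
}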
diff --git a/jam-files/engine/command.h b/jam-files/engine/command.h
new file mode 100644
index 000000000..ddd38e689
--- /dev/null
+++ b/jam-files/engine/command.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 1994 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * command.h - the CMD structure and routines to manipulate them
+ *
+ * Both ACTION and CMD contain a rule, targets, and sources. An
+ * ACTION describes a rule to be applied to the given targets and
+ * sources; a CMD is what actually gets executed by the shell. The
+ * differences are due to:
+ *
+ * ACTIONS must be combined if 'actions together' is given.
+ * ACTIONS must be split if 'actions piecemeal' is given.
+ * ACTIONS must have current sources omitted for 'actions updated'.
+ *
+ * The CMD datatype holds a single command that is to be executed
+ * against a target, and they can chain together to represent the
+ * full collection of commands used to update a target.
+ *
+ * Structures:
+ *
+ * CMD - an action, ready to be formatted into a buffer and executed.
+ *
+ * External routines:
+ *
+ * cmd_new() - return a new CMD or 0 if too many args.
+ * cmd_free() - delete CMD and its parts.
+ * cmd_next() - walk the CMD chain.
+ */
+
+
+/*
+ * CMD - an action, ready to be formatted into a buffer and executed.
+ */
+
+typedef struct _cmd CMD;
+
+struct _cmd
+{
+ CMD * next;
+ CMD * tail; /* valid only in head */
+ RULE * rule; /* rule->actions contains shell script */
+ LIST * shell; /* $(SHELL) value */
+ LOL args; /* LISTs for $(<), $(>) */
+ char * buf; /* actual commands */
+};
+
+CMD * cmd_new
+(
+ RULE * rule, /* rule (referenced) */
+ LIST * targets, /* $(<) (freed) */
+ LIST * sources, /* $(>) (freed) */
+ LIST * shell /* $(SHELL) (freed) */
+);
+
+void cmd_free( CMD * );
+
+#define cmd_next( c ) ( ( c )->next )
diff --git a/jam-files/engine/compile.c b/jam-files/engine/compile.c
new file mode 100644
index 000000000..2c049aae5
--- /dev/null
+++ b/jam-files/engine/compile.c
@@ -0,0 +1,1424 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+
+# include "lists.h"
+# include "parse.h"
+# include "compile.h"
+# include "variable.h"
+# include "expand.h"
+# include "rules.h"
+# include "newstr.h"
+# include "make.h"
+# include "search.h"
+# include "hdrmacro.h"
+# include "hash.h"
+# include "modules.h"
+# include "strings.h"
+# include "builtins.h"
+# include "class.h"
+
+# include <assert.h>
+# include <string.h>
+# include <stdarg.h>
+
+/*
+ * compile.c - compile parsed jam statements
+ *
+ * External routines:
+ *
+ * compile_append() - append list results of two statements
+ * compile_eval() - evaluate if to determine which leg to compile
+ * compile_foreach() - compile the "for x in y" statement
+ * compile_if() - compile 'if' rule
+ * compile_while() - compile 'while' rule
+ * compile_include() - support for 'include' - call include() on file
+ * compile_list() - expand and return a list
+ * compile_local() - declare (and set) local variables
+ * compile_null() - do nothing -- a stub for parsing
+ * compile_on() - run rule under influence of on-target variables
+ * compile_rule() - compile a single user defined rule
+ * compile_rules() - compile a chain of rules
+ * compile_set() - compile the "set variable" statement
+ * compile_setcomp() - support for `rule` - save parse tree
+ * compile_setexec() - support for `actions` - save execution string
+ * compile_settings() - compile the "on =" (set variable on exec) statement
+ * compile_switch() - compile 'switch' rule
+ *
+ * Internal routines:
+ *
+ * debug_compile() - printf with indent to show rule expansion.
+ * evaluate_rule() - execute a rule invocation
+ *
+ * builtin_depends() - DEPENDS/INCLUDES rule
+ * builtin_echo() - ECHO rule
+ * builtin_exit() - EXIT rule
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
+ *
+ * 02/03/94 (seiwald) - Changed trace output to read "setting" instead of
+ * the awkward sounding "settings".
+ * 04/12/94 (seiwald) - Combined build_depends() with build_includes().
+ * 04/12/94 (seiwald) - actionlist() now just appends a single action.
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 05/13/94 (seiwald) - include files are now bound as targets, and thus
+ * can make use of $(SEARCH)
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
+ * 01/22/95 (seiwald) - Exit rule.
+ * 02/02/95 (seiwald) - Always rule; LEAVES rule.
+ * 02/14/95 (seiwald) - NoUpdate rule.
+ * 09/11/00 (seiwald) - new evaluate_rule() for headers().
+ * 09/11/00 (seiwald) - compile_xxx() now return LIST *.
+ * New compile_append() and compile_list() in
+ * support of building lists here, rather than
+ * in jamgram.yy.
+ * 01/10/00 (seiwald) - built-ins split out to builtin.c.
+ */
+
+static void debug_compile( int which, char *s, FRAME* frame );
+int glob( char *s, char *c );
+/* Internal functions from builtins.c */
+void backtrace( FRAME *frame );
+void backtrace_line( FRAME *frame );
+void print_source_line( PARSE* p );
+
+struct frame * frame_before_python_call;
+
+void frame_init( FRAME* frame )
+{
+ frame->prev = 0;
+ frame->prev_user = 0;
+ lol_init(frame->args);
+ frame->module = root_module();
+ frame->rulename = "module scope";
+ frame->procedure = 0;
+}
+
+
+void frame_free( FRAME* frame )
+{
+ lol_free( frame->args );
+}
+
+
+/*
+ * compile_append() - append list results of two statements
+ *
+ * parse->left more compile_append() by left-recursion
+ * parse->right single rule
+ */
+
+LIST * compile_append( PARSE * parse, FRAME * frame )
+{
+ /* Append right to left. */
+ return list_append(
+ parse_evaluate( parse->left, frame ),
+ parse_evaluate( parse->right, frame ) );
+}
+
+
+/*
+ * compile_eval() - evaluate if to determine which leg to compile
+ *
+ * Returns:
+ * list if expression true - compile 'then' clause
+ * L0 if expression false - compile 'else' clause
+ */
+
+static int lcmp( LIST * t, LIST * s )
+{
+ int status = 0;
+
+ while ( !status && ( t || s ) )
+ {
+ char *st = t ? t->string : "";
+ char *ss = s ? s->string : "";
+
+ status = strcmp( st, ss );
+
+ t = t ? list_next( t ) : t;
+ s = s ? list_next( s ) : s;
+ }
+
+ return status;
+}
+
+LIST * compile_eval( PARSE * parse, FRAME * frame )
+{
+ LIST * ll;
+ LIST * lr;
+ LIST * s;
+ LIST * t;
+ int status = 0;
+
+ /* Short circuit lr eval for &&, ||, and 'in'. */
+
+ ll = parse_evaluate( parse->left, frame );
+ lr = 0;
+
+ switch ( parse->num )
+ {
+ case EXPR_AND:
+ case EXPR_IN : if ( ll ) goto eval; break;
+ case EXPR_OR : if ( !ll ) goto eval; break;
+ default: eval: lr = parse_evaluate( parse->right, frame );
+ }
+
+ /* Now eval. */
+ switch ( parse->num )
+ {
+ case EXPR_NOT: if ( !ll ) status = 1; break;
+ case EXPR_AND: if ( ll && lr ) status = 1; break;
+ case EXPR_OR : if ( ll || lr ) status = 1; break;
+
+ case EXPR_IN:
+ /* "a in b": make sure each of ll is equal to something in lr. */
+ for ( t = ll; t; t = list_next( t ) )
+ {
+ for ( s = lr; s; s = list_next( s ) )
+ if ( !strcmp( t->string, s->string ) )
+ break;
+ if ( !s ) break;
+ }
+ /* No more ll? Success. */
+ if ( !t ) status = 1;
+ break;
+
+ case EXPR_EXISTS: if ( lcmp( ll, L0 ) != 0 ) status = 1; break;
+ case EXPR_EQUALS: if ( lcmp( ll, lr ) == 0 ) status = 1; break;
+ case EXPR_NOTEQ : if ( lcmp( ll, lr ) != 0 ) status = 1; break;
+ case EXPR_LESS : if ( lcmp( ll, lr ) < 0 ) status = 1; break;
+ case EXPR_LESSEQ: if ( lcmp( ll, lr ) <= 0 ) status = 1; break;
+ case EXPR_MORE : if ( lcmp( ll, lr ) > 0 ) status = 1; break;
+ case EXPR_MOREEQ: if ( lcmp( ll, lr ) >= 0 ) status = 1; break;
+ }
+
+ if ( DEBUG_IF )
+ {
+ debug_compile( 0, "if", frame );
+ list_print( ll );
+ printf( "(%d) ", status );
+ list_print( lr );
+ printf( "\n" );
+ }
+
+ /* Find something to return. */
+ /* In odd circumstances (like "" = "") */
+ /* we'll have to return a new string. */
+
+ if ( !status ) t = 0;
+ else if ( ll ) t = ll, ll = 0;
+ else if ( lr ) t = lr, lr = 0;
+ else t = list_new( L0, newstr( "1" ) );
+
+ if ( ll ) list_free( ll );
+ if ( lr ) list_free( lr );
+ return t;
+}
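The EXPR_IN case above implements "a in b" as a subset test: every element of the left list must compare equal to some element of the right list, and an exhausted left list means success. The same logic over NULL-terminated string arrays, as a standalone sketch:

/* Standalone sketch, not part of the patch: the "a in b" subset test from
 * compile_eval()'s EXPR_IN case, over NULL-terminated string arrays. */
#include <stdio.h>
#include <string.h>

/* Returns 1 if every string in ll also appears in lr, else 0. */
static int list_in( const char * const * ll, const char * const * lr )
{
    const char * const * t;
    const char * const * s;
    for ( t = ll; *t; ++t )
    {
        for ( s = lr; *s; ++s )
            if ( strcmp( *t, *s ) == 0 )
                break;
        if ( !*s )
            return 0;   /* *t matched nothing in lr */
    }
    return 1;           /* no more ll: success, as in the original */
}

int main( void )
{
    const char * const a[] = { "debug", "static", NULL };
    const char * const b[] = { "static", "debug", "multi", NULL };
    printf( "%d\n", list_in( a, b ) );  /* prints 1 */
    printf( "%d\n", list_in( b, a ) );  /* prints 0: "multi" is missing */
    return 0;
}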
+
+
+/*
+ * compile_foreach() - compile the "for x in y" statement
+ *
+ * Compile_foreach() resets the given variable name to each specified
+ * value, executing the commands enclosed in braces for each iteration.
+ *
+ * parse->string index variable
+ * parse->left variable values
+ * parse->right rule to compile
+ */
+
+LIST * compile_foreach( PARSE * parse, FRAME * frame )
+{
+ LIST * nv = parse_evaluate( parse->left, frame );
+ LIST * l;
+ SETTINGS * s = 0;
+
+ if ( parse->num )
+ {
+ s = addsettings( s, VAR_SET, parse->string, L0 );
+ pushsettings( s );
+ }
+
+ /* Call var_set to reset $(parse->string) for each val. */
+
+ for ( l = nv; l; l = list_next( l ) )
+ {
+ LIST * val = list_new( L0, copystr( l->string ) );
+ var_set( parse->string, val, VAR_SET );
+ list_free( parse_evaluate( parse->right, frame ) );
+ }
+
+ if ( parse->num )
+ {
+ popsettings( s );
+ freesettings( s );
+ }
+
+ list_free( nv );
+
+ return L0;
+}
+
+/*
+ * compile_if() - compile 'if' rule
+ *
+ * parse->left condition tree
+ * parse->right then tree
+ * parse->third else tree
+ */
+
+LIST * compile_if( PARSE * p, FRAME * frame )
+{
+ LIST * l = parse_evaluate( p->left, frame );
+ if ( l )
+ {
+ list_free( l );
+ return parse_evaluate( p->right, frame );
+ }
+ return parse_evaluate( p->third, frame );
+}
+
+
+LIST * compile_while( PARSE * p, FRAME * frame )
+{
+ LIST * r = 0;
+ LIST * l;
+ while ( ( l = parse_evaluate( p->left, frame ) ) )
+ {
+ list_free( l );
+ if ( r ) list_free( r );
+ r = parse_evaluate( p->right, frame );
+ }
+ return r;
+}
+
+
+/*
+ * compile_include() - support for 'include' - call include() on file
+ *
+ * parse->left list of files to include (can only do 1)
+ */
+
+LIST * compile_include( PARSE * parse, FRAME * frame )
+{
+ LIST * nt = parse_evaluate( parse->left, frame );
+
+ if ( DEBUG_COMPILE )
+ {
+ debug_compile( 0, "include", frame);
+ list_print( nt );
+ printf( "\n" );
+ }
+
+ if ( nt )
+ {
+ TARGET * t = bindtarget( nt->string );
+
+ /* DWA 2001/10/22 - Perforce Jam cleared the arguments here, which
+ * prevents an included file from being treated as part of the body of a
+ * rule. I did not see any reason to do that, so I lifted the
+ * restriction.
+ */
+
+ /* Bind the include file under the influence of */
+ /* "on-target" variables. Though they are targets, */
+ /* include files are not built with make(). */
+
+ pushsettings( t->settings );
+ /* We don't expect the file to be included to be generated by some
+ action. Therefore, pass 0 as the third argument.
+ If the name resolves to a directory, let it error out. */
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( t->settings );
+
+ parse_file( t->boundname, frame );
+ }
+
+ list_free( nt );
+
+ return L0;
+}
+
+static LIST* evaluate_in_module ( char* module_name, PARSE * p, FRAME* frame)
+{
+ LIST* result;
+
+ module_t* outer_module = frame->module;
+ frame->module = module_name ? bindmodule( module_name ) : root_module();
+
+ if ( outer_module != frame->module )
+ {
+ exit_module( outer_module );
+ enter_module( frame->module );
+ }
+
+ result = parse_evaluate( p, frame );
+
+ if ( outer_module != frame->module )
+ {
+ exit_module( frame->module );
+ enter_module( outer_module );
+ frame->module = outer_module;
+ }
+
+ return result;
+}
+
+
+LIST * compile_module( PARSE * p, FRAME * frame )
+{
+ /* Here we are entering a module declaration block. */
+ LIST * module_name = parse_evaluate( p->left, frame );
+ LIST * result = evaluate_in_module( module_name ? module_name->string : 0,
+ p->right, frame );
+ list_free( module_name );
+ return result;
+}
+
+
+LIST * compile_class( PARSE * p, FRAME * frame )
+{
+ /** Todo: check for empty class name.
+ Check for class redeclaration. */
+
+ char * class_module = 0;
+
+ LIST * name = parse_evaluate( p->left->right, frame );
+ LIST * bases = 0;
+
+ if ( p->left->left )
+ bases = parse_evaluate( p->left->left->right, frame );
+
+ class_module = make_class_module( name, bases, frame );
+ evaluate_in_module( class_module, p->right, frame );
+
+ return L0;
+}
+
+
+/*
+ * compile_list() - expand and return a list.
+ *
+ * parse->string - character string to expand.
+ */
+
+LIST * compile_list( PARSE * parse, FRAME * frame )
+{
+ /* s is a copyable string */
+ char * s = parse->string;
+ return var_expand( L0, s, s + strlen( s ), frame->args, 1 );
+}
+
+
+/*
+ * compile_local() - declare (and set) local variables.
+ *
+ * parse->left list of variables
+ * parse->right list of values
+ * parse->third rules to execute
+ */
+
+LIST * compile_local( PARSE * parse, FRAME * frame )
+{
+ LIST * l;
+ SETTINGS * s = 0;
+ LIST * nt = parse_evaluate( parse->left, frame );
+ LIST * ns = parse_evaluate( parse->right, frame );
+ LIST * result;
+
+ if ( DEBUG_COMPILE )
+ {
+ debug_compile( 0, "local", frame );
+ list_print( nt );
+ printf( " = " );
+ list_print( ns );
+ printf( "\n" );
+ }
+
+ /* Initial value is ns. */
+ for ( l = nt; l; l = list_next( l ) )
+ s = addsettings( s, VAR_SET, l->string, list_copy( (LIST *)0, ns ) );
+
+ list_free( ns );
+ list_free( nt );
+
+ /* Note that callees of the current context get this "local" variable,
+ * making it not so much local as layered.
+ */
+
+ pushsettings( s );
+ result = parse_evaluate( parse->third, frame );
+ popsettings( s );
+
+ freesettings( s );
+
+ return result;
+}
+
+
+/*
+ * compile_null() - do nothing -- a stub for parsing.
+ */
+
+LIST * compile_null( PARSE * parse, FRAME * frame )
+{
+ return L0;
+}
+
+
+/*
+ * compile_on() - run rule under influence of on-target variables
+ *
+ * parse->left list of files to include (can only do 1).
+ * parse->right rule to run.
+ *
+ * EXPERIMENTAL!
+ */
+
+LIST * compile_on( PARSE * parse, FRAME * frame )
+{
+ LIST * nt = parse_evaluate( parse->left, frame );
+ LIST * result = 0;
+
+ if ( DEBUG_COMPILE )
+ {
+ debug_compile( 0, "on", frame );
+ list_print( nt );
+ printf( "\n" );
+ }
+
+ if ( nt )
+ {
+ TARGET * t = bindtarget( nt->string );
+ pushsettings( t->settings );
+ result = parse_evaluate( parse->right, frame );
+ popsettings( t->settings );
+ }
+
+ list_free( nt );
+
+ return result;
+}
+
+
+/*
+ * compile_rule() - compile a single user defined rule.
+ *
+ * parse->string name of user defined rule.
+ * parse->left parameters (list of lists) to rule, recursing left.
+ *
+ * Wrapped around evaluate_rule() so that headers() can share it.
+ */
+
+LIST * compile_rule( PARSE * parse, FRAME * frame )
+{
+ FRAME inner[ 1 ];
+ LIST * result;
+ PARSE * p;
+
+ /* Build up the list of arg lists. */
+ frame_init( inner );
+ inner->prev = frame;
+ inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
+ inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */
+ inner->procedure = parse;
+ /* Special-case LOL of length 1 where the first list is totally empty.
+ This is created when calling functions with no parameters, due to
+ the way the Jam grammar is written. This is OK when one Jam function
+ calls another, but really not good when a Jam function calls Python. */
+ if ( parse->left->left == NULL && parse->left->right->func == compile_null)
+ ;
+ else
+ for ( p = parse->left; p; p = p->left )
+ lol_add( inner->args, parse_evaluate( p->right, frame ) );
+
+ /* And invoke the rule. */
+ result = evaluate_rule( parse->string, inner );
+ frame_free( inner );
+ return result;
+}
+
+
+static void argument_error( char * message, RULE * rule, FRAME * frame, LIST* arg )
+{
+ LOL * actual = frame->args;
+ assert( frame->procedure != 0 );
+ backtrace_line( frame->prev );
+ printf( "*** argument error\n* rule %s ( ", frame->rulename );
+ lol_print( rule->arguments->data );
+ printf( " )\n* called with: ( " );
+ lol_print( actual );
+ printf( " )\n* %s %s\n", message, arg ? arg->string : "" );
+ print_source_line( rule->procedure );
+ printf( "see definition of rule '%s' being called\n", rule->name );
+ backtrace( frame->prev );
+ exit( 1 );
+}
+
+
+/* Define delimiters for type check elements in argument lists (and return type
+ * specifications, eventually).
+ */
+# define TYPE_OPEN_DELIM '['
+# define TYPE_CLOSE_DELIM ']'
+
+/*
+ * is_type_name() - true iff the given string represents a type check
+ * specification.
+ */
+
+static int is_type_name( char * s )
+{
+ return ( s[ 0 ] == TYPE_OPEN_DELIM ) &&
+ ( s[ strlen( s ) - 1 ] == TYPE_CLOSE_DELIM );
+}
+
+
+/*
+ * arg_modifier - if the next element of formal is a single character, return
+ * that; return 0 otherwise. Used to extract "*+?" modifiers from argument
+ * lists.
+ */
+
+static char arg_modifier( LIST * formal )
+{
+ if ( formal->next )
+ {
+ char * next = formal->next->string;
+ if ( next && ( next[ 0 ] != 0 ) && ( next[ 1 ] == 0 ) )
+ return next[ 0 ];
+ }
+ return 0;
+}
+
+
+/*
+ * type_check() - checks that each element of values satisfies the requirements
+ * of type_name.
+ *
+ * caller - the frame of the rule calling the rule whose arguments are
+ * being checked
+ *
+ * called - the rule being called
+ *
+ * arg_name - a list element containing the name of the argument being
+ * checked
+ */
+
+static void type_check
+(
+ char * type_name,
+ LIST * values,
+ FRAME * caller,
+ RULE * called,
+ LIST * arg_name
+)
+{
+ static module_t * typecheck = 0;
+
+ /* If nothing to check, bail now. */
+ if ( !values || !type_name )
+ return;
+
+ if ( !typecheck )
+ typecheck = bindmodule( ".typecheck" );
+
+ /* If the checking rule can not be found, also bail. */
+ {
+ RULE checker_, *checker = &checker_;
+
+ checker->name = type_name;
+ if ( !typecheck->rules || !hashcheck( typecheck->rules, (HASHDATA * *)&checker ) )
+ return;
+ }
+
+ exit_module( caller->module );
+
+ while ( values != 0 )
+ {
+ LIST *error;
+ FRAME frame[1];
+ frame_init( frame );
+ frame->module = typecheck;
+ frame->prev = caller;
+ frame->prev_user = caller->module->user_module ? caller : caller->prev_user;
+
+ enter_module( typecheck );
+ /* Prepare the argument list */
+ lol_add( frame->args, list_new( L0, values->string ) );
+ error = evaluate_rule( type_name, frame );
+
+ exit_module( typecheck );
+
+ if ( error )
+ argument_error( error->string, called, caller, arg_name );
+
+ frame_free( frame );
+ values = values->next;
+ }
+
+ enter_module( caller->module );
+}
+
+/*
+ * collect_arguments() - local argument checking and collection
+ */
+static SETTINGS *
+collect_arguments( RULE* rule, FRAME* frame )
+{
+ SETTINGS *locals = 0;
+
+ LOL * all_actual = frame->args;
+ LOL * all_formal = rule->arguments ? rule->arguments->data : 0;
+ if ( all_formal ) /* Nothing to set; nothing to check */
+ {
+ int max = all_formal->count > all_actual->count
+ ? all_formal->count
+ : all_actual->count;
+
+ int n;
+ for ( n = 0; n < max ; ++n )
+ {
+ LIST *actual = lol_get( all_actual, n );
+ char *type_name = 0;
+
+ LIST *formal;
+ for ( formal = lol_get( all_formal, n ); formal; formal = formal->next )
+ {
+ char* name = formal->string;
+
+ if ( is_type_name(name) )
+ {
+ if ( type_name )
+ argument_error( "missing argument name before type name:", rule, frame, formal );
+
+ if ( !formal->next )
+ argument_error( "missing argument name after type name:", rule, frame, formal );
+
+ type_name = formal->string;
+ }
+ else
+ {
+ LIST* value = 0;
+ char modifier;
+ LIST* arg_name = formal; /* hold the argument name for type checking */
+ int multiple = 0;
+
+ /* Stop now if a variable number of arguments is specified */
+ if ( name[0] == '*' && name[1] == 0 )
+ return locals;
+
+ modifier = arg_modifier( formal );
+
+ if ( !actual && modifier != '?' && modifier != '*' )
+ argument_error( "missing argument", rule, frame, formal );
+
+ switch ( modifier )
+ {
+ case '+':
+ case '*':
+ value = list_copy( 0, actual );
+ multiple = 1;
+ actual = 0;
+ /* skip an extra element for the modifier */
+ formal = formal->next;
+ break;
+ case '?':
+ /* skip an extra element for the modifier */
+ formal = formal->next;
+ /* fall through */
+ default:
+ if ( actual ) /* in case actual is missing */
+ {
+ value = list_new( 0, actual->string );
+ actual = actual->next;
+ }
+ }
+
+ locals = addsettings(locals, VAR_SET, name, value);
+ locals->multiple = multiple;
+ type_check( type_name, value, frame, rule, arg_name );
+ type_name = 0;
+ }
+ }
+
+ if ( actual )
+ {
+ argument_error( "extra argument", rule, frame, actual );
+ }
+ }
+ }
+ return locals;
+}
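collect_arguments() above binds actual values to formal parameter names while honouring the Jam argument modifiers: '?' marks a parameter optional, '*' and '+' bind whatever values remain ('+' additionally requires at least one in the engine), and a bare '*' formal stops checking altogether. The sketch below shows that dispatch for a single parameter list, flattened to name/modifier pairs; the parameter and argument names are illustrative only:

/* Standalone sketch, not part of the patch: how the '?', '*' and '+'
 * modifiers in collect_arguments() consume values from one actual list. */
#include <stdio.h>

/* A formal parameter: its name plus an optional modifier character. */
struct formal { const char * name; char modifier; };

static void bind_args( const struct formal * formals, int nformals,
                       const char * const * actuals, int nactuals )
{
    int i, a = 0;
    for ( i = 0; i < nformals; ++i )
    {
        char m = formals[ i ].modifier;
        if ( m == '*' || m == '+' )
        {
            /* Bind everything that is left, like the list_copy() branch.
             * In the engine '+' would additionally demand at least one value. */
            printf( "%s <-", formals[ i ].name );
            for ( ; a < nactuals; ++a )
                printf( " %s", actuals[ a ] );
            printf( "\n" );
        }
        else if ( a < nactuals )
            printf( "%s <- %s\n", formals[ i ].name, actuals[ a++ ] );
        else if ( m == '?' )
            printf( "%s <- (unset, optional)\n", formals[ i ].name );
        else
            printf( "%s: missing argument\n", formals[ i ].name );  /* argument_error() */
    }
    if ( a < nactuals )
        printf( "extra argument: %s\n", actuals[ a ] );             /* argument_error() */
}

int main( void )
{
    /* Roughly a parameter list such as ( name type ? values * ). */
    const struct formal formals[] = { { "name", 0 }, { "type", '?' }, { "values", '*' } };
    const char * const actuals[]  = { "foo", "int", "1", "2" };
    bind_args( formals, 3, actuals, 4 );
    return 0;
}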
+
+RULE *
+enter_rule( char *rulename, module_t *target_module );
+
+#ifdef HAVE_PYTHON
+
+static int python_instance_number = 0;
+
+
+/* Given a Python object, return a string to use in Jam
+ code instead of said object.
+ If the object is a string, use the string value.
+ If the object implements the __jam_repr__ method, use that.
+ Otherwise return 0.
+
+ The result value is newstr-ed. */
+char *python_to_string(PyObject* value)
+{
+ if (PyString_Check(value))
+ {
+ return newstr(PyString_AsString(value));
+ }
+ else
+ {
+ /* See if this is an instance that defines special __jam_repr__
+ method. */
+ if (PyInstance_Check(value)
+ && PyObject_HasAttrString(value, "__jam_repr__"))
+ {
+ PyObject* repr = PyObject_GetAttrString(value, "__jam_repr__");
+ if (repr)
+ {
+ PyObject* arguments2 = PyTuple_New(0);
+ PyObject* value2 = PyObject_Call(repr, arguments2, 0);
+ Py_DECREF(repr);
+ Py_DECREF(arguments2);
+ if (PyString_Check(value2))
+ {
+ return newstr(PyString_AsString(value2));
+ }
+ Py_DECREF(value2);
+ }
+ }
+ return 0;
+ }
+}
+
+static LIST*
+call_python_function(RULE* r, FRAME* frame)
+{
+ LIST * result = 0;
+ PyObject * arguments = 0;
+ PyObject * kw = NULL;
+ int i ;
+ PyObject * py_result;
+
+ if (r->arguments)
+ {
+ SETTINGS * args;
+
+ arguments = PyTuple_New(0);
+ kw = PyDict_New();
+
+ for (args = collect_arguments(r, frame); args; args = args->next)
+ {
+ PyObject *key = PyString_FromString(args->symbol);
+ PyObject *value = 0;
+ if (args->multiple)
+ value = list_to_python(args->value);
+ else {
+ if (args->value)
+ value = PyString_FromString(args->value->string);
+ }
+
+ if (value)
+ PyDict_SetItem(kw, key, value);
+ Py_DECREF(key);
+ Py_XDECREF(value);
+ }
+ }
+ else
+ {
+ arguments = PyTuple_New( frame->args->count );
+ for ( i = 0; i < frame->args->count; ++i )
+ {
+ PyObject * arg = PyList_New(0);
+ LIST* l = lol_get( frame->args, i);
+
+ for ( ; l; l = l->next )
+ {
+ PyObject * v = PyString_FromString(l->string);
+ PyList_Append( arg, v );
+ Py_DECREF(v);
+ }
+ /* Steals reference to 'arg' */
+ PyTuple_SetItem( arguments, i, arg );
+ }
+ }
+
+ frame_before_python_call = frame;
+ py_result = PyObject_Call( r->python_function, arguments, kw );
+ Py_DECREF(arguments);
+ Py_XDECREF(kw);
+ if ( py_result != NULL )
+ {
+ if ( PyList_Check( py_result ) )
+ {
+ int size = PyList_Size( py_result );
+ int i;
+ for ( i = 0; i < size; ++i )
+ {
+ PyObject * item = PyList_GetItem( py_result, i );
+ char *s = python_to_string (item);
+ if (!s) {
+ fprintf( stderr, "Non-string object returned by Python call.\n" );
+ } else {
+ result = list_new (result, s);
+ }
+ }
+ }
+ else if ( py_result == Py_None )
+ {
+ result = L0;
+ }
+ else
+ {
+ char *s = python_to_string(py_result);
+ if (s)
+ result = list_new(0, s);
+ else
+ /* We have tried all we could. Return an empty list. There are
+ cases, e.g. the feature.feature function, which should return a
+ value for the benefit of Python code and can also be called by
+ Jam code, but where no sensible value can be returned. We
+ cannot even emit a warning, since there would be a pile of
+ them. */
+ result = L0;
+ }
+
+ Py_DECREF( py_result );
+ }
+ else
+ {
+ PyErr_Print();
+ fprintf(stderr,"Call failed\n");
+ }
+
+ return result;
+}
+
+
+module_t * python_module()
+{
+ static module_t * python = 0;
+ if ( !python )
+ python = bindmodule("__python__");
+ return python;
+}
+
+#endif
+
+
+/*
+ * evaluate_rule() - execute a rule invocation.
+ */
+
+LIST *
+evaluate_rule(
+ char * rulename,
+ FRAME * frame )
+{
+ LIST * result = L0;
+ RULE * rule;
+ profile_frame prof[1];
+ module_t * prev_module = frame->module;
+
+ LIST * l;
+ {
+ LOL arg_context_, * arg_context = &arg_context_;
+ if ( !frame->prev )
+ lol_init(arg_context);
+ else
+ arg_context = frame->prev->args;
+ l = var_expand( L0, rulename, rulename+strlen(rulename), arg_context, 0 );
+ }
+
+ if ( !l )
+ {
+ backtrace_line( frame->prev );
+ printf( "warning: rulename %s expands to empty string\n", rulename );
+ backtrace( frame->prev );
+ return result;
+ }
+
+ rulename = l->string;
+ rule = bindrule( l->string, frame->module );
+
+#ifdef HAVE_PYTHON
+ if ( rule->python_function )
+ {
+ /* The below messing with modules is due to the way modules are
+ * implemented in Jam. Suppose we are in module M1 now. The global
+ * variable map actually holds 'M1' variables, and M1->variables hold
+ * global variables.
+ *
+ * If we call Python right away, Python calls back Jam and then Jam
+ * does 'module M1 { }' then Jam will try to swap the current global
+ * variables with M1->variables. The result will be that global
+ * variables map will hold global variables, and any variable settings
+ * we do will go to the global module, not M1.
+ *
+ * By restoring basic state, where the global variable map holds global
+ * variables, we make sure any future 'module M1' entry will work OK.
+ */
+
+ LIST * result;
+ module_t * m = python_module();
+
+ frame->module = m;
+
+ exit_module( prev_module );
+ enter_module( m );
+
+ result = call_python_function( rule, frame );
+
+ exit_module( m );
+ enter_module ( prev_module );
+
+ return result;
+ }
+#endif
+
+ /* Drop the rule name. */
+ l = list_pop_front( l );
+
+ /* Tack the rest of the expansion onto the front of the first argument. */
+ frame->args->list[0] = list_append( l, lol_get( frame->args, 0 ) );
+
+ if ( DEBUG_COMPILE )
+ {
+ /* Try hard to indicate in which module the rule is going to execute. */
+ if ( rule->module != frame->module
+ && rule->procedure != 0 && strcmp( rulename, rule->procedure->rulename ) )
+ {
+ char buf[256] = "";
+ strncat( buf, rule->module->name, sizeof( buf ) - 1 );
+ strncat( buf, rule->name, sizeof( buf ) - 1 );
+ debug_compile( 1, buf, frame );
+ }
+ else
+ {
+ debug_compile( 1, rulename, frame );
+ }
+
+ lol_print( frame->args );
+ printf( "\n" );
+ }
+
+ if ( rule->procedure && rule->module != prev_module )
+ {
+ /* Propagate current module to nested rule invocations. */
+ frame->module = rule->module;
+
+ /* Swap variables. */
+ exit_module( prev_module );
+ enter_module( rule->module );
+ }
+
+ /* Record current rule name in frame. */
+ if ( rule->procedure )
+ {
+ frame->rulename = rulename;
+ /* And enter record profile info. */
+ if ( DEBUG_PROFILE )
+ profile_enter( rule->procedure->rulename, prof );
+ }
+
+ /* Check traditional targets $(<) and sources $(>). */
+ if ( !rule->actions && !rule->procedure )
+ {
+ backtrace_line( frame->prev );
+ printf( "rule %s unknown in module %s\n", rule->name, frame->module->name );
+ backtrace( frame->prev );
+ exit( 1 );
+ }
+
+ /* If this rule will be executed for updating the targets then construct the
+ * action for make().
+ */
+ if ( rule->actions )
+ {
+ TARGETS * t;
+ ACTION * action;
+
+ /* The action is associated with this instance of this rule. */
+ action = (ACTION *)BJAM_MALLOC( sizeof( ACTION ) );
+ memset( (char *)action, '\0', sizeof( *action ) );
+
+ action->rule = rule;
+ action->targets = targetlist( (TARGETS *)0, lol_get( frame->args, 0 ) );
+ action->sources = targetlist( (TARGETS *)0, lol_get( frame->args, 1 ) );
+
+ /* If we have a group of targets all being built using the same action
+ * then we must not allow any of them to be used as sources unless they
+ * had all already been built in the first place or their joined action
+ * has had a chance to finish its work and build all of them anew.
+ *
+ * Without this it might be possible, in case of a multi-process build,
+ * for their action, triggered by building one of the targets, to still
+ * be running when another target in the group reports as done in order
+ * to avoid triggering the same action again and gets used prematurely.
+ *
+ * As a quick-fix to achieve this effect we make all the targets list
+ * each other as 'included targets'. More precisely, we mark the first
+ * listed target as including all the other targets in the list and vice
+ * versa. This makes anyone depending on any of those targets implicitly
+ * depend on all of them, thus making sure none of those targets can be
+ * used as sources until all of them have been built. Note that direct
+ * dependencies could not have been used due to the 'circular
+ * dependency' issue.
+ *
+ * TODO: Although the current implementation solves the problem of one
+ * of the targets getting used before its action completes its work it
+ * also forces the action to run whenever any of the targets in the
+ * group is not up to date even though some of them might not actually
+ * be used by the targets being built. We should see how we can
+ * correctly recognize such cases and use that to avoid running the
+ * action if possible and not rebuild targets not actually depending on
+ * targets that are not up to date.
+ *
+ * TODO: Using the 'include' feature might have side-effects due to
+ * interaction with the actual 'inclusion scanning' system. This should
+ * be checked.
+ */
+ if ( action->targets )
+ {
+ TARGET * t0 = action->targets->target;
+ for ( t = action->targets->next; t; t = t->next )
+ {
+ target_include( t->target, t0 );
+ target_include( t0, t->target );
+ }
+ }
+
+ /* Append this action to the actions of each target. */
+ for ( t = action->targets; t; t = t->next )
+ t->target->actions = actionlist( t->target->actions, action );
+ }
+
+ /* Now recursively compile any parse tree associated with this rule.
+ * parse_refer()/parse_free() call pair added to ensure rule not freed
+ * during use.
+ */
+ if ( rule->procedure )
+ {
+ SETTINGS * local_args = collect_arguments( rule, frame );
+ PARSE * parse = rule->procedure;
+ parse_refer( parse );
+
+ pushsettings( local_args );
+ result = parse_evaluate( parse, frame );
+ popsettings( local_args );
+ freesettings( local_args );
+
+ parse_free( parse );
+ }
+
+ if ( frame->module != prev_module )
+ {
+ exit_module( frame->module );
+ enter_module( prev_module );
+ }
+
+ if ( DEBUG_PROFILE && rule->procedure )
+ profile_exit( prof );
+
+ if ( DEBUG_COMPILE )
+ debug_compile( -1, 0, frame);
+
+ return result;
+}
+
+
+/*
+ * Call the given rule with the specified parameters. The parameters should be
+ * of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule'
+ * in that frame for the called rule is prepared inside 'call_rule'.
+ *
+ * This function is useful when a builtin rule (in C) wants to call another rule
+ * which might be implemented in Jam.
+ */
+
+LIST * call_rule( char * rulename, FRAME * caller_frame, ... )
+{
+ va_list va;
+ LIST * result;
+
+ FRAME inner[1];
+ frame_init( inner );
+ inner->prev = caller_frame;
+ inner->prev_user = caller_frame->module->user_module ?
+ caller_frame : caller_frame->prev_user;
+ inner->module = caller_frame->module;
+ inner->procedure = 0;
+
+ va_start( va, caller_frame );
+ for ( ; ; )
+ {
+ LIST * l = va_arg( va, LIST* );
+ if ( !l )
+ break;
+ lol_add( inner->args, l );
+ }
+ va_end( va );
+
+ result = evaluate_rule( rulename, inner );
+
+ frame_free( inner );
+
+ return result;
+}
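call_rule() above takes its argument lists as a varargs sequence terminated by a NULL pointer, which is how a C builtin hands parameters to a rule that may be written in Jam. The same calling convention in a standalone sketch, using plain strings instead of LIST pointers:

/* Standalone sketch, not part of the patch: the NULL-terminated varargs
 * convention used by call_rule(), shown with plain strings. */
#include <stdarg.h>
#include <stdio.h>

/* Prints every string argument up to the terminating NULL. */
static void print_args( const char * first, ... )
{
    va_list va;
    const char * s;

    va_start( va, first );
    for ( s = first; s != NULL; s = va_arg( va, const char * ) )
        printf( "arg: %s\n", s );
    va_end( va );
}

int main( void )
{
    /* The trailing NULL plays the same role as in
     * call_rule( "some-rule", frame, list1, list2, NULL ). */
    print_args( "targets", "sources", (const char *)NULL );
    return 0;
}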
+
+
+/*
+ * compile_rules() - compile a chain of rules
+ *
+ * parse->left single rule
+ * parse->right more compile_rules() by right-recursion
+ */
+
+LIST * compile_rules( PARSE * parse, FRAME * frame )
+{
+ /* Ignore result from first statement; return the 2nd. */
+ /* Optimize recursion on the right by looping. */
+ do list_free( parse_evaluate( parse->left, frame ) );
+ while ( ( parse = parse->right )->func == compile_rules );
+ return parse_evaluate( parse, frame );
+}
+
+
+/*
+ * assign_var_mode() - convert ASSIGN_XXX compilation flag into corresponding
+ * VAR_XXX variable set flag.
+ */
+
+static int assign_var_mode( int parsenum, char const * * tracetext )
+{
+ char const * trace;
+ int setflag;
+ switch ( parsenum )
+ {
+ case ASSIGN_SET : setflag = VAR_SET ; trace = "=" ; break;
+ case ASSIGN_APPEND : setflag = VAR_APPEND ; trace = "+="; break;
+ case ASSIGN_DEFAULT: setflag = VAR_DEFAULT; trace = "?="; break;
+ default: setflag = VAR_SET ; trace = "" ; break;
+ }
+ if ( tracetext )
+ *tracetext = trace ;
+ return setflag;
+}
+
+/*
+ * compile_set() - compile the "set variable" statement
+ *
+ * parse->left variable names
+ * parse->right variable values
+ * parse->num ASSIGN_SET/APPEND/DEFAULT
+ */
+
+LIST * compile_set( PARSE * parse, FRAME * frame )
+{
+ LIST * nt = parse_evaluate( parse->left, frame );
+ LIST * ns = parse_evaluate( parse->right, frame );
+ LIST * l;
+ char const * trace;
+ int setflag = assign_var_mode( parse->num, &trace );
+
+ if ( DEBUG_COMPILE )
+ {
+ debug_compile( 0, "set", frame );
+ list_print( nt );
+ printf( " %s ", trace );
+ list_print( ns );
+ printf( "\n" );
+ }
+
+ /* Call var_set to set variable. var_set keeps ns, so need to copy it. */
+ for ( l = nt; l; l = list_next( l ) )
+ var_set( l->string, list_copy( L0, ns ), setflag );
+ list_free( nt );
+ return ns;
+}
+
+
+/*
+ * compile_setcomp() - support for `rule` - save parse tree.
+ *
+ * parse->string rule name
+ * parse->left rules for rule
+ * parse->right optional list-of-lists describing arguments
+ */
+
+LIST * compile_setcomp( PARSE * parse, FRAME * frame )
+{
+ argument_list * arg_list = 0;
+
+ /* Create new LOL describing argument requirements if supplied. */
+ if ( parse->right )
+ {
+ PARSE * p;
+ arg_list = args_new();
+ for ( p = parse->right; p; p = p->left )
+ lol_add( arg_list->data, parse_evaluate( p->right, frame ) );
+ }
+
+ new_rule_body( frame->module, parse->string, arg_list, parse->left, !parse->num );
+ return L0;
+}
+
+
+/*
+ * compile_setexec() - support for `actions` - save execution string.
+ *
+ * parse->string rule name
+ * parse->string1 OS command string
+ * parse->num flags
+ * parse->left `bind` variables
+ *
+ * Note that the parse flags (as defined in compile.h) are transferred directly
+ * to the rule flags (as defined in rules.h).
+ */
+
+LIST * compile_setexec( PARSE * parse, FRAME * frame )
+{
+ LIST * bindlist = parse_evaluate( parse->left, frame );
+ new_rule_actions( frame->module, parse->string, parse->string1, bindlist, parse->num );
+ return L0;
+}
+
+
+/*
+ * compile_settings() - compile the "on =" (set variable on exec) statement.
+ *
+ * parse->left variable names
+ * parse->right target name
+ * parse->third variable value
+ * parse->num ASSIGN_SET/APPEND
+ */
+
+LIST * compile_settings( PARSE * parse, FRAME * frame )
+{
+ LIST * nt = parse_evaluate( parse->left, frame );
+ LIST * ns = parse_evaluate( parse->third, frame );
+ LIST * targets = parse_evaluate( parse->right, frame );
+ LIST * ts;
+ char const * trace;
+ int setflag = assign_var_mode( parse->num, &trace );
+
+ if ( DEBUG_COMPILE )
+ {
+ debug_compile( 0, "set", frame );
+ list_print( nt );
+ printf( " on " );
+ list_print( targets );
+ printf( " %s ", trace );
+ list_print( ns );
+ printf( "\n" );
+ }
+
+ /* Call addsettings() to save variable setting. addsettings() keeps ns, so
+ * need to copy it. Pass append flag to addsettings().
+ */
+ for ( ts = targets; ts; ts = list_next( ts ) )
+ {
+ TARGET * t = bindtarget( ts->string );
+ LIST * l;
+
+ for ( l = nt; l; l = list_next( l ) )
+ t->settings = addsettings( t->settings, setflag, l->string,
+ list_copy( (LIST *)0, ns ) );
+ }
+
+ list_free( nt );
+ list_free( targets );
+ return ns;
+}
+
+
+/*
+ * compile_switch() - compile 'switch' rule.
+ *
+ * parse->left switch value (only 1st used)
+ * parse->right cases
+ *
+ * cases->left 1st case
+ * cases->right next cases
+ *
+ * case->string argument to match
+ * case->left parse tree to execute
+ */
+
+LIST * compile_switch( PARSE * parse, FRAME * frame )
+{
+ LIST * nt = parse_evaluate( parse->left, frame );
+ LIST * result = 0;
+
+ if ( DEBUG_COMPILE )
+ {
+ debug_compile( 0, "switch", frame );
+ list_print( nt );
+ printf( "\n" );
+ }
+
+ /* Step through cases. */
+ for ( parse = parse->right; parse; parse = parse->right )
+ {
+ if ( !glob( parse->left->string, nt ? nt->string : "" ) )
+ {
+ /* Get & exec parse tree for this case. */
+ parse = parse->left->left;
+ result = parse_evaluate( parse, frame );
+ break;
+ }
+ }
+
+ list_free( nt );
+ return result;
+}
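compile_switch() above walks the cases in order and runs the first one whose label glob-matches the switch value (jam ships its own glob() for this). The behaviour can be previewed with POSIX fnmatch(), used here purely as a stand-in for that internal matcher; the case labels are illustrative:

/* Standalone sketch, not part of the patch: shell-style case matching as in
 * compile_switch(), with POSIX fnmatch() standing in for jam's own glob(). */
#include <fnmatch.h>
#include <stdio.h>

static const char * switch_match( const char * value,
                                  const char * const * cases, int ncases )
{
    int i;
    for ( i = 0; i < ncases; ++i )
        if ( fnmatch( cases[ i ], value, 0 ) == 0 )   /* 0 means "matched" */
            return cases[ i ];                        /* first match wins */
    return NULL;
}

int main( void )
{
    /* Case labels as they might appear in a Jamfile switch block. */
    const char * const cases[] = { "*.cpp", "*.c", "*" };
    const char * hit = switch_match( "lexer.c", cases, 3 );
    printf( "matched case: %s\n", hit ? hit : "(none)" );   /* prints *.c */
    return 0;
}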
+
+
+/*
+ * debug_compile() - printf with indent to show rule expansion.
+ */
+
+static void debug_compile( int which, char * s, FRAME * frame )
+{
+ static int level = 0;
+ static char indent[36] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|";
+
+ if ( which >= 0 )
+ {
+ int i;
+
+ print_source_line( frame->procedure );
+
+ i = ( level + 1 ) * 2;
+ while ( i > 35 )
+ {
+ fputs( indent, stdout );
+ i -= 35;
+ }
+
+ printf( "%*.*s ", i, i, indent );
+ }
+
+ if ( s )
+ printf( "%s ", s );
+
+ level += which;
+}
diff --git a/jam-files/engine/compile.h b/jam-files/engine/compile.h
new file mode 100644
index 000000000..7d5191f0e
--- /dev/null
+++ b/jam-files/engine/compile.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef COMPILE_DWA20011022_H
+# define COMPILE_DWA20011022_H
+
+# include "frames.h"
+# include "parse.h"
+# include "regexp.h"
+
+/*
+ * compile.h - compile parsed jam statements
+ */
+
+void compile_builtins();
+
+LIST *compile_append( PARSE *parse, FRAME *frame );
+LIST *compile_foreach( PARSE *parse, FRAME *frame );
+LIST *compile_if( PARSE *parse, FRAME *frame );
+LIST *compile_eval( PARSE *parse, FRAME *args );
+LIST *compile_include( PARSE *parse, FRAME *frame );
+LIST *compile_list( PARSE *parse, FRAME *frame );
+LIST *compile_local( PARSE *parse, FRAME *frame );
+LIST *compile_module( PARSE *parse, FRAME *frame );
+LIST *compile_class( PARSE *parse, FRAME *frame );
+LIST *compile_null( PARSE *parse, FRAME *frame );
+LIST *compile_on( PARSE *parse, FRAME *frame );
+LIST *compile_rule( PARSE *parse, FRAME *frame );
+LIST *compile_rules( PARSE *parse, FRAME *frame );
+LIST *compile_set( PARSE *parse, FRAME *frame );
+LIST *compile_setcomp( PARSE *parse, FRAME *frame );
+LIST *compile_setexec( PARSE *parse, FRAME *frame );
+LIST *compile_settings( PARSE *parse, FRAME *frame );
+LIST *compile_switch( PARSE *parse, FRAME *frame );
+LIST *compile_while( PARSE *parse, FRAME *frame );
+
+LIST *evaluate_rule( char *rulename, FRAME *frame );
+LIST *call_rule( char *rulename, FRAME* caller_frame, ...);
+
+regexp* regex_compile( const char* pattern );
+
+/* Flags for compile_set(), etc */
+
+# define ASSIGN_SET 0x00 /* = assign variable */
+# define ASSIGN_APPEND 0x01 /* += append variable */
+# define ASSIGN_DEFAULT 0x02 /* set only if unset */
+
+/* Flags for compile_setexec() */
+
+# define EXEC_UPDATED 0x01 /* executes updated */
+# define EXEC_TOGETHER 0x02 /* executes together */
+# define EXEC_IGNORE 0x04 /* executes ignore */
+# define EXEC_QUIETLY 0x08 /* executes quietly */
+# define EXEC_PIECEMEAL 0x10 /* executes piecemeal */
+# define EXEC_EXISTING 0x20 /* executes existing */
+
+/* Conditions for compile_if() */
+
+# define EXPR_NOT 0 /* ! cond */
+# define EXPR_AND 1 /* cond && cond */
+# define EXPR_OR 2 /* cond || cond */
+
+# define EXPR_EXISTS 3 /* arg */
+# define EXPR_EQUALS 4 /* arg = arg */
+# define EXPR_NOTEQ 5 /* arg != arg */
+# define EXPR_LESS 6 /* arg < arg */
+# define EXPR_LESSEQ 7 /* arg <= arg */
+# define EXPR_MORE 8 /* arg > arg */
+# define EXPR_MOREEQ 9 /* arg >= arg */
+# define EXPR_IN 10 /* arg in arg */
+
+#endif
+
diff --git a/jam-files/engine/debian/changelog b/jam-files/engine/debian/changelog
new file mode 100644
index 000000000..29084289c
--- /dev/null
+++ b/jam-files/engine/debian/changelog
@@ -0,0 +1,72 @@
+bjam (3.1.12-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Rene Rivera <grafik@redshift-software.com> Sat, 01 Oct 2005 00:00:00 +0000
+
+bjam (3.1.11-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Rene Rivera <grafik@redshift-software.com> Sat, 30 Apr 2005 00:00:00 +0000
+
+bjam (3.1.10-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Rene Rivera <grafik@redshift-software.com> Tue, 1 Jun 2004 05:42:35 +0000
+
+bjam (3.1.9-2) unstable; urgency=low
+
+ * Use default value of BOOST_BUILD_PATH if it is not set in the environment.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Wed, 17 Dec 2003 16:44:35 +0300
+
+bjam (3.1.9-1) unstable; urgency=low
+
+ * Implement NATIVE_FILE builtin and several native rules.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Dec 2003 13:15:26 +0300
+
+bjam (3.1.8-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 4 Nov 2003 20:50:43 +0300
+
+bjam (3.1.7-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Sep 2003 10:45:44 +0400
+
+bjam (3.1.6-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 1 Jul 2003 09:12:18 +0400
+
+bjam (3.1.5-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Mon, 19 May 2003 14:05:13 +0400
+
+bjam (3.1.3-2) unstable; urgency=low
+
+ * Changed Debian package to be similar to Jam's package.
+
+ -- Vladimir Prus <ghost@cs.msu.su> Thu, 10 Oct 2002 18:43:26 +0400
+
+bjam (3.1.3-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Fri, 4 Oct 2002 18:16:54 +0400
+
+bjam (3.1.2-1) unstable; urgency=low
+
+ * Initial Release.
+
+ -- Vladimir Prus <ghost@cs.msu.su> Wed, 14 Aug 2002 14:08:00 +0400
+
diff --git a/jam-files/engine/debian/control b/jam-files/engine/debian/control
new file mode 100644
index 000000000..c7f151932
--- /dev/null
+++ b/jam-files/engine/debian/control
@@ -0,0 +1,16 @@
+Source: bjam
+Section: devel
+Priority: optional
+Maintainer: Vladimir Prus <ghost@cs.msu.su>
+Build-Depends: debhelper (>> 3.0.0), docbook-to-man, bison
+Standards-Version: 3.5.2
+
+Package: bjam
+Architecture: any
+Depends: ${shlibs:Depends}
+Description: Build tool
+ Boost.Jam is a portable build tool with its own interpreted language, which
+ makes it possible to implement rather complex logic in a readable way, without
+ resorting to external programs. It is a descendant of the Jam/MR tool, modified to
+ suit the needs of Boost.Build. In particular, modules and rule parameters
+ were added, as well as several new builtins.
diff --git a/jam-files/engine/debian/copyright b/jam-files/engine/debian/copyright
new file mode 100644
index 000000000..f72e4e3a9
--- /dev/null
+++ b/jam-files/engine/debian/copyright
@@ -0,0 +1,25 @@
+This package was debianized by Vladimir Prus <ghost@cs.msu.su> on
+Wed, 17 July 2002, 19:27:00 +0400.
+
+Copyright:
+
+ /+\
+ +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ \+/
+
+ This is Release 2.4 of Jam/MR, a make-like program.
+
+ License is hereby granted to use this software and distribute it
+ freely, as long as this copyright notice is retained and modifications
+ are clearly marked.
+
+ ALL WARRANTIES ARE HEREBY DISCLAIMED.
+
+Some portions are also:
+
+ Copyright 2001-2006 David Abrahams.
+ Copyright 2002-2006 Rene Rivera.
+ Copyright 2003-2006 Vladimir Prus.
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
diff --git a/jam-files/engine/debian/jam.man.sgml b/jam-files/engine/debian/jam.man.sgml
new file mode 100644
index 000000000..ee21d4d83
--- /dev/null
+++ b/jam-files/engine/debian/jam.man.sgml
@@ -0,0 +1,236 @@
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
+
+<!-- Process this file with docbook-to-man to generate an nroff manual
+ page: `docbook-to-man manpage.sgml > manpage.1'. You may view
+ the manual page with: `docbook-to-man manpage.sgml | nroff -man |
+ less'. A typical entry in a Makefile or Makefile.am is:
+
+manpage.1: manpage.sgml
+ docbook-to-man $< > $@
+ -->
+
+ <!ENTITY dhfirstname "<firstname>Yann</firstname>">
+ <!ENTITY dhsurname "<surname>Dirson</surname>">
+ <!-- Please adjust the date whenever revising the manpage. -->
+ <!ENTITY dhdate "<date>mai 23, 2001</date>">
+ <!ENTITY dhemail "<email>dirson@debian.org</email>">
+ <!ENTITY dhusername "Yann Dirson">
+ <!ENTITY dhpackage "jam">
+
+ <!ENTITY debian "<productname>Debian GNU/Linux</productname>">
+ <!ENTITY gnu "<acronym>GNU</acronym>">
+]>
+
+<refentry>
+ <refentryinfo>
+ <address>
+ &dhemail;
+ </address>
+ <author>
+ &dhfirstname;
+ &dhsurname;
+ </author>
+ <copyright>
+ <year>2001</year>
+ <holder>&dhusername;</holder>
+ </copyright>
+ &dhdate;
+ </refentryinfo>
+
+ <refmeta>
+ <refentrytitle>JAM</refentrytitle>
+ <manvolnum>1</manvolnum>
+ </refmeta>
+
+ <refnamediv>
+ <refname>Jam/MR</refname>
+ <refpurpose>Make(1) Redux</refpurpose>
+ </refnamediv>
+
+ <refsynopsisdiv>
+ <cmdsynopsis>
+ <command>jam</command>
+
+ <arg><option>-a</option></arg>
+ <arg><option>-n</option></arg>
+ <arg><option>-v</option></arg>
+
+ <arg><option>-d <replaceable/debug/</option></arg>
+ <arg><option>-f <replaceable/jambase/</option></arg>
+ <arg><option>-j <replaceable/jobs/</option></arg>
+ <arg><option>-o <replaceable/actionsfile/</option></arg>
+ <arg><option>-s <replaceable/var/=<replaceable/value/</option></arg>
+ <arg><option>-t <replaceable/target/</option></arg>
+
+ <arg repeat><option><replaceable/target/</option></arg>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <refsect1>
+ <title>DESCRIPTION</title>
+
+ <para>Jam is a program construction tool, like make(1).</para>
+
+ <para>Jam recursively builds target files from source files, using
+ dependency information and updating actions expressed in the
+ Jambase file, which is written in jam's own interpreted language.
+ The default Jambase is compiled into jam and provides a
+ boilerplate for common use, relying on a user-provided file
+ "Jamfile" to enumerate actual targets and sources.</para>
+ </refsect1>
+
+ <refsect1>
+ <title>OPTIONS</title>
+
+ <variablelist>
+ <varlistentry>
+ <term><option/-a/</term>
+ <listitem>
+ <para>Build all targets anyway, even if they are up-to-date.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d <replaceable/n/</option></term>
+ <listitem>
+        <para>Enable cumulative debugging levels from 1 to
+ <replaceable/n/. Interesting values are:
+
+ <glosslist>
+ <glossentry><glossterm/1/ <glossdef><simpara/Show
+ actions (the default)/</glossdef></glossentry>
+
+ <glossentry><glossterm/2/ <glossdef><simpara/Show
+ "quiet" actions and display all action
+ text/</glossdef></glossentry>
+
+ <glossentry><glossterm/3/ <glossdef><simpara>Show
+ dependency analysis, and target/source
+ timestamps/paths</simpara></glossdef></glossentry>
+
+ <glossentry><glossterm/4/ <glossdef><simpara/Show shell
+ arguments/</glossdef></glossentry>
+
+ <glossentry><glossterm/5/ <glossdef><simpara/Show rule
+ invocations and variable
+ expansions/</glossdef></glossentry>
+
+ <glossentry><glossterm/6/ <glossdef><simpara>Show
+ directory/header file/archive
+ scans</simpara></glossdef></glossentry>
+
+ <glossentry><glossterm/7/ <glossdef><simpara/Show
+ variable settings/</glossdef></glossentry>
+
+ <glossentry><glossterm/8/ <glossdef><simpara/Show
+ variable fetches/</glossdef></glossentry>
+
+ <glossentry><glossterm/9/ <glossdef><simpara/Show
+ variable manipulation, scanner
+ tokens/</glossdef></glossentry>
+ </glosslist>
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d +<replaceable/n/</option></term>
+ <listitem>
+ <para>Enable debugging level <replaceable/n/.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option/-d 0/</term>
+ <listitem>
+ <para>Turn off all debugging levels. Only errors are not
+ suppressed.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-f <replaceable/jambase/</option></term>
+ <listitem>
+ <para>Read <replaceable/jambase/ instead of using the
+ built-in Jambase. Only one <option/-f/ flag is permitted,
+ but the <replaceable/jambase/ may explicitly include other
+ files.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-j <replaceable/n/</option></term>
+ <listitem>
+ <para>Run up to <replaceable/n/ shell commands concurrently
+ (UNIX and NT only). The default is 1.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option/-n/</term>
+ <listitem>
+ <para>Don't actually execute the updating actions, but do
+ everything else. This changes the debug level default to
+ <option/-d2/.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-o <replaceable/file/</option></term>
+ <listitem>
+ <para>Write the updating actions to the specified file
+ instead of running them (or outputting them, as on the
+ Mac).</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-s <replaceable/var/=<replaceable/value/</option></term>
+ <listitem>
+ <para>Set the variable <replaceable/var/ to
+ <replaceable/value/, overriding both internal variables and
+ variables imported from the environment. </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-t <replaceable/target/</option></term>
+ <listitem>
+ <para>Rebuild <replaceable/target/ and everything that
+ depends on it, even if it is up-to-date.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option/-v/</term>
+ <listitem>
+ <para>Print the version of jam and exit.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+ </refsect1>
+
+ <refsect1>
+ <title>SEE ALSO</title>
+
+ <para>Jam is documented fully in HTML pages available on Debian
+ systems from
+ <filename>/usr/share/doc/jam/Jam.html</filename>.</para>
+ </refsect1>
+
+ <refsect1>
+ <title>AUTHOR</title>
+
+ <para>This manual page was created by &dhusername; &dhemail; from
+ the <filename/Jam.html/ documentation, for the &debian; system
+ (but may be used by others).</para>
+ </refsect1>
+</refentry>
+
+<!-- Keep this comment at the end of the file
+Local variables:
+sgml-omittag:t
+sgml-shorttag:t
+End:
+-->
diff --git a/jam-files/engine/debian/rules b/jam-files/engine/debian/rules
new file mode 100755
index 000000000..756052a3b
--- /dev/null
+++ b/jam-files/engine/debian/rules
@@ -0,0 +1,73 @@
+#!/usr/bin/make -f
+# Sample debian/rules that uses debhelper.
+# GNU copyright 1997 to 1999 by Joey Hess.
+# GNU copyright 2001 by Yann Dirson.
+
+# This is the debian/rules file for packages jam and ftjam
+# It should be usable with both packages without any change
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+
+# This is the debhelper compatibility version to use.
+export DH_COMPAT=3
+
+topdir=$(shell pwd)
+
+jam=bjam
+binname=bjam
+
+build: build-stamp
+build-stamp: debian/jam.1
+ dh_testdir
+
+ ./build.sh
+
+ touch build-stamp
+
+%.1: %.man.sgml
+ /usr/bin/docbook-to-man $< > $@
+
+clean:
+ dh_testdir
+ dh_testroot
+ rm -f build-stamp
+ rm -rf bin.*
+ rm -f jam0 debian/jam.1
+ dh_clean
+
+install: build
+ dh_testdir
+ dh_testroot
+ dh_clean -k
+ dh_installdirs
+
+ install -d ${topdir}/debian/${jam}/usr/bin
+ install -m755 bin.linuxx86/bjam ${topdir}/debian/${jam}/usr/bin/
+ install -d ${topdir}/debian/${jam}/usr/share/man/man1/
+ install -m644 debian/jam.1 ${topdir}/debian/${jam}/usr/share/man/man1/${binname}.1
+
+
+# Build architecture-independent files here.
+binary-indep: build install
+# We have nothing to do by default.
+
+# Build architecture-dependent files here.
+binary-arch: build install
+ dh_testdir
+ dh_testroot
+ dh_installdocs README RELNOTES Jambase *.html
+# dh_installemacsen
+# dh_undocumented
+ dh_installchangelogs
+ dh_strip
+ dh_compress
+ dh_fixperms
+ dh_installdeb
+ dh_shlibdeps
+ dh_gencontrol
+ dh_md5sums
+ dh_builddeb
+
+binary: binary-indep binary-arch
+.PHONY: build clean binary-indep binary-arch binary install configure
diff --git a/jam-files/engine/debug.c b/jam-files/engine/debug.c
new file mode 100644
index 000000000..7290555a7
--- /dev/null
+++ b/jam-files/engine/debug.c
@@ -0,0 +1,132 @@
+/*
+ Copyright Rene Rivera 2005.
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include "jam.h"
+
+#include "hash.h"
+
+#include <time.h>
+#include <assert.h>
+
+
+static profile_frame * profile_stack = 0;
+static struct hash * profile_hash = 0;
+static profile_info profile_other = { "[OTHER]", 0, 0, 0, 0, 0 };
+static profile_info profile_total = { "[TOTAL]", 0, 0, 0, 0, 0 };
+
+
+profile_frame * profile_init( char * rulename, profile_frame * frame )
+{
+ if ( DEBUG_PROFILE ) profile_enter( rulename, frame );
+ return frame;
+}
+
+
+void profile_enter( char * rulename, profile_frame * frame )
+{
+ if ( DEBUG_PROFILE )
+ {
+ clock_t start = clock();
+ profile_info info;
+ profile_info * p = &info;
+
+ if ( !rulename ) p = &profile_other;
+
+ if ( !profile_hash && rulename )
+ profile_hash = hashinit( sizeof( profile_info ), "profile" );
+
+ info.name = rulename;
+
+ if ( rulename && hashenter( profile_hash, (HASHDATA * *)&p ) )
+ p->cumulative = p->net = p->num_entries = p->stack_count = p->memory = 0;
+
+ ++p->num_entries;
+ ++p->stack_count;
+
+ frame->info = p;
+
+ frame->caller = profile_stack;
+ profile_stack = frame;
+
+ frame->entry_time = clock();
+ frame->overhead = 0;
+ frame->subrules = 0;
+
+ /* caller pays for the time it takes to play with the hash table */
+ if ( frame->caller )
+ frame->caller->overhead += frame->entry_time - start;
+ }
+}
+
+
+void profile_memory( long mem )
+{
+ if ( DEBUG_PROFILE )
+ if ( profile_stack && profile_stack->info )
+ profile_stack->info->memory += mem;
+}
+
+
+void profile_exit( profile_frame * frame )
+{
+ if ( DEBUG_PROFILE )
+ {
+ /* Cumulative time for this call. */
+ clock_t t = clock() - frame->entry_time - frame->overhead;
+ /* If this rule is already present on the stack, don't add the time for
+ * this instance.
+ */
+ if ( frame->info->stack_count == 1 )
+ frame->info->cumulative += t;
+        /* Net time does not depend on presence of the same rule in the call stack.
+ */
+ frame->info->net += t - frame->subrules;
+
+ if ( frame->caller )
+ {
+ /* Caller's cumulative time must account for this overhead. */
+ frame->caller->overhead += frame->overhead;
+ frame->caller->subrules += t;
+ }
+ /* Pop this stack frame. */
+ --frame->info->stack_count;
+ profile_stack = frame->caller;
+ }
+}
+
+
+static void dump_profile_entry( void * p_, void * ignored )
+{
+ profile_info * p = (profile_info *)p_;
+ unsigned long mem_each = ( p->memory / ( p->num_entries ? p->num_entries : 1 ) );
+ double cumulative = p->cumulative;
+ double net = p->net;
+ double q = p->net;
+ q /= ( p->num_entries ? p->num_entries : 1 );
+ cumulative /= CLOCKS_PER_SEC;
+ net /= CLOCKS_PER_SEC;
+ q /= CLOCKS_PER_SEC;
+ if ( !ignored )
+ {
+ profile_total.cumulative += p->net;
+ profile_total.memory += p->memory;
+ }
+ printf( "%10ld %12.6f %12.6f %12.8f %10ld %10ld %s\n", p->num_entries,
+ cumulative, net, q, p->memory, mem_each, p->name );
+}
+
+
+void profile_dump()
+{
+ if ( profile_hash )
+ {
+ printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--",
+ "--net--", "--each--", "--mem--", "--each--", "--name--" );
+ hashenumerate( profile_hash, dump_profile_entry, 0 );
+ dump_profile_entry( &profile_other, 0 );
+ dump_profile_entry( &profile_total, (void *)1 );
+ }
+}
diff --git a/jam-files/engine/debug.h b/jam-files/engine/debug.h
new file mode 100644
index 000000000..115a88735
--- /dev/null
+++ b/jam-files/engine/debug.h
@@ -0,0 +1,54 @@
+/*
+ Copyright Rene Rivera 2005.
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+#ifndef BJAM_DEBUG_H
+#define BJAM_DEBUG_H
+
+#include "jam.h"
+#include <time.h>
+
+
+struct profile_info
+{
+ /* name of rule being called */
+ char* name;
+ /* cumulative time spent in rule */
+ clock_t cumulative;
+ /* time spent in rule proper */
+ clock_t net;
+    /* number of times the rule was entered */
+ unsigned long num_entries;
+    /* number of times this rule is present on the stack */
+ unsigned long stack_count;
+ /* bytes of memory allocated by the call */
+ unsigned long memory;
+};
+typedef struct profile_info profile_info;
+
+struct profile_frame
+{
+ /* permanent storage where data accumulates */
+ profile_info* info;
+ /* overhead for profiling in this call */
+ clock_t overhead;
+ /* time of last entry to rule */
+ clock_t entry_time;
+ /* stack frame of caller */
+ struct profile_frame* caller;
+ /* time spent in subrules */
+ clock_t subrules;
+};
+typedef struct profile_frame profile_frame;
+
+profile_frame * profile_init( char * rulename, profile_frame * frame );
+void profile_enter( char* rulename, profile_frame * frame );
+void profile_memory( long mem );
+void profile_exit( profile_frame * frame );
+void profile_dump();
+
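+/* Convenience macros: PROFILE_ENTER declares a local profile_frame named
+ * after the given scope and, when profiling is enabled (DEBUG_PROFILE),
+ * records entry into that scope; PROFILE_EXIT records the matching exit.
+ */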
+#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( #scope, &PROF_ ## scope )
+#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p )
+
+#endif
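The PROFILE_ENTER/PROFILE_EXIT pair above is the whole public surface of the
profiler. A minimal usage sketch, not part of the diff: the function and rule
name below are hypothetical, and the snippet only compiles inside the jam
engine tree (it relies on "debug.h" and the engine's DEBUG_PROFILE flag).

    #include "jam.h"
    #include "debug.h"

    static void evaluate_my_rule( void )
    {
        PROFILE_ENTER( my_rule );   /* declare and register a profile_frame for "my_rule" */
        /* ... work attributed to "my_rule" goes here ... */
        PROFILE_EXIT( my_rule );    /* pop the frame, accumulating gross/net times */
    }

Calling profile_dump() at the end of the run prints the --count--/--gross--/--net--
table that debug.c accumulates.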
diff --git a/jam-files/engine/execcmd.h b/jam-files/engine/execcmd.h
new file mode 100644
index 000000000..67f2b839c
--- /dev/null
+++ b/jam-files/engine/execcmd.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * execcmd.h - execute a shell script.
+ *
+ * Defines the interface to be implemented in platform specific implementation
+ * modules.
+ *
+ * 05/04/94 (seiwald) - async multiprocess interface
+ */
+
+#ifndef EXECCMD_H
+#define EXECCMD_H
+
+#include <time.h>
+
+typedef struct timing_info
+{
+ double system;
+ double user;
+ time_t start;
+ time_t end;
+} timing_info;
+
+void exec_cmd
+(
+ char * string,
+ void (* func)( void * closure, int status, timing_info *, char *, char * ),
+ void * closure,
+ LIST * shell,
+ char * action,
+ char * target
+);
+
+int exec_wait();
+
+#define EXEC_CMD_OK 0
+#define EXEC_CMD_FAIL 1
+#define EXEC_CMD_INTR 2
+
+#endif
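execcmd.h is the contract that each platform back-end in this diff (execnt.c,
execunix.c, execmac.c) implements: a caller hands exec_cmd() a command string
plus a completion callback and then drains completions with exec_wait(). A
rough sketch of the calling side, with hypothetical names (inside the engine
the real caller is the make1() machinery; this is illustration only):

    #include <stdio.h>
    #include "jam.h"
    #include "lists.h"
    #include "execcmd.h"

    /* Completion callback: invoked from exec_wait() when the command finishes. */
    static void my_cmd_done( void * closure, int status, timing_info * time,
                             char * invoked_command, char * command_output )
    {
        if ( status != EXEC_CMD_OK )
            printf( "command failed: %s\n", invoked_command );
    }

    static void run_one( char * command )
    {
        /* A null shell list selects the platform default (/bin/sh or cmd.exe). */
        exec_cmd( command, my_cmd_done, 0, 0, "my-action", "my-target" );
        while ( exec_wait() )
            ;   /* drive i/o and completions until nothing is left running */
    }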
diff --git a/jam-files/engine/execmac.c b/jam-files/engine/execmac.c
new file mode 100644
index 000000000..2ddddedd1
--- /dev/null
+++ b/jam-files/engine/execmac.c
@@ -0,0 +1,69 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "execcmd.h"
+#include <errno.h>
+
+#ifdef OS_MAC
+
+/*
+ * execunix.c - execute a shell script on UNIX
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate execvp().
+ * The default is:
+ *
+ * /bin/sh -c %
+ *
+ * Each word must be an individual element in a jam variable value.
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to
+ * the slot number (starting at 1) for multiprocess (-j) invocations.
+ * If $(JAMSHELL) doesn't include a %, it is tacked on as the last
+ * argument.
+ *
+ * Don't just set JAMSHELL to /bin/sh - it won't work!
+ *
+ * External routines:
+ * exec_cmd() - launch an async command execution.
+ * exec_wait() - wait and drive at most one execution completion.
+ *
+ * Internal routines:
+ * onintr() - bump intr to note command interruption.
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 05/04/94 (seiwald) - async multiprocess interface
+ * 01/22/95 (seiwald) - $(JAMSHELL) support
+ */
+
+
+/*
+ * exec_cmd() - launch an async command execution.
+ */
+
+void exec_cmd
+(
+    char * string,
+    void (* func)( void * closure, int status, timing_info *, char *, char * ),
+    void * closure,
+    LIST * shell,
+    char * action,
+    char * target
+)
+{
+    /* Mac stub: just echo the command and report success (no timing or
+     * output is collected), using the signature declared in execcmd.h.
+     */
+    printf( "%s", string );
+    (*func)( closure, EXEC_CMD_OK, 0, 0, 0 );
+}
+
+/*
+ * exec_wait() - wait and drive at most one execution completion.
+ */
+
+int exec_wait()
+{
+ return 0;
+}
+
+#endif /* OS_MAC */
diff --git a/jam-files/engine/execnt.c b/jam-files/engine/execnt.c
new file mode 100644
index 000000000..764204518
--- /dev/null
+++ b/jam-files/engine/execnt.c
@@ -0,0 +1,1296 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2007 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "execcmd.h"
+#include "pathsys.h"
+#include "string.h"
+#include "output.h"
+#include <errno.h>
+#include <assert.h>
+#include <ctype.h>
+#include <time.h>
+#include <math.h>
+
+#ifdef USE_EXECNT
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <process.h>
+#include <tlhelp32.h>
+
+/*
+ * execnt.c - execute a shell command on Windows NT
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp().
+ * The default is:
+ *
+ * /bin/sh -c % [ on UNIX/AmigaOS ]
+ * cmd.exe /c % [ on Windows NT ]
+ *
+ * Each word must be an individual element in a jam variable value.
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to
+ * the slot number (starting at 1) for multiprocess (-j) invocations.
+ * If $(JAMSHELL) doesn't include a %, it is tacked on as the last
+ * argument.
+ *
+ * Don't just set JAMSHELL to /bin/sh or cmd.exe - it won't work!
+ *
+ * External routines:
+ * exec_cmd() - launch an async command execution.
+ * exec_wait() - wait and drive at most one execution completion.
+ *
+ * Internal routines:
+ * onintr() - bump intr to note command interruption.
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 05/04/94 (seiwald) - async multiprocess interface
+ * 01/22/95 (seiwald) - $(JAMSHELL) support
+ * 06/02/97 (gsar) - full async multiprocess support for Win32
+ */
+
+/* get the maximum command line length according to the OS */
+int maxline();
+
+/* delete an argv list */
+static void free_argv(char**);
+/* Convert a command string into arguments for spawnvp. */
+static char** string_to_args(const char*);
+/* bump intr to note command interruption */
+static void onintr(int);
+/* If the command is suitable for execution via spawnvp */
+long can_spawn(char*);
+/* Add two 64-bit unsigned numbers, h1l1 and h2l2 */
+static FILETIME add_64(
+ unsigned long h1, unsigned long l1,
+ unsigned long h2, unsigned long l2);
+static FILETIME add_FILETIME(FILETIME t1, FILETIME t2);
+static FILETIME negate_FILETIME(FILETIME t);
+/* Convert a FILETIME to a number of seconds */
+static double filetime_seconds(FILETIME t);
+/* record the timing info for the process */
+static void record_times(HANDLE, timing_info*);
+/* calc the current running time of an *active* process */
+static double running_time(HANDLE);
+/* */
+DWORD get_process_id(HANDLE);
+/* terminate the given process, after terminating all its children */
+static void kill_process_tree(DWORD, HANDLE);
+/* waits for a command to complete or for the given timeout, whichever is first */
+static int try_wait(int timeoutMillis);
+/* reads any pending output for running commands */
+static void read_output();
+/* checks if a command ran out of time, and kills it */
+static int try_kill_one();
+/* */
+static double creation_time(HANDLE);
+/* Recursive check if first process is parent (directly or indirectly) of
+the second one. */
+static int is_parent_child(DWORD, DWORD);
+/* */
+static void close_alert(HANDLE);
+/* close any alerts hanging around */
+static void close_alerts();
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+static int intr = 0;
+static int cmdsrunning = 0;
+static void (* istat)( int );
+
+
+/* The list of commands we run. */
+static struct
+{
+ string action; /* buffer to hold action */
+ string target; /* buffer to hold target */
+ string command; /* buffer to hold command being invoked */
+
+ /* Temporary batch file used to execute the action when needed. */
+ char * tempfile_bat;
+
+ /* Pipes for communicating with the child process. Parent reads from (0),
+ * child writes to (1).
+ */
+ HANDLE pipe_out[ 2 ];
+ HANDLE pipe_err[ 2 ];
+
+ string buffer_out; /* buffer to hold stdout, if any */
+ string buffer_err; /* buffer to hold stderr, if any */
+
+ PROCESS_INFORMATION pi; /* running process information */
+ DWORD exit_code; /* executed command's exit code */
+ int exit_reason; /* reason why a command completed */
+
+ /* Function called when the command completes. */
+ void (* func)( void * closure, int status, timing_info *, char *, char * );
+
+ /* Opaque data passed back to the 'func' callback called when the command
+ * completes.
+ */
+ void * closure;
+}
+cmdtab[ MAXJOBS ] = { { 0 } };
+
+
+/*
+ * Execution unit tests.
+ */
+
+void execnt_unit_test()
+{
+#if !defined( NDEBUG )
+ /* vc6 preprocessor is broken, so assert with these strings gets confused.
+ * Use a table instead.
+ */
+ typedef struct test { char * command; int result; } test;
+ test tests[] = {
+ { "x", 0 },
+ { "x\n ", 0 },
+ { "x\ny", 1 },
+ { "x\n\n y", 1 },
+ { "echo x > foo.bar", 1 },
+ { "echo x < foo.bar", 1 },
+ { "echo x \">\" foo.bar", 0 },
+ { "echo x \"<\" foo.bar", 0 },
+ { "echo x \\\">\\\" foo.bar", 1 },
+ { "echo x \\\"<\\\" foo.bar", 1 } };
+ int i;
+ for ( i = 0; i < sizeof( tests ) / sizeof( *tests ); ++i )
+ assert( !can_spawn( tests[ i ].command ) == tests[ i ].result );
+
+ {
+ char * long_command = BJAM_MALLOC_ATOMIC( MAXLINE + 10 );
+ assert( long_command != 0 );
+ memset( long_command, 'x', MAXLINE + 9 );
+ long_command[ MAXLINE + 9 ] = 0;
+ assert( can_spawn( long_command ) == MAXLINE + 9 );
+ BJAM_FREE( long_command );
+ }
+
+ {
+ /* Work around vc6 bug; it doesn't like escaped string
+ * literals inside assert
+ */
+ char * * argv = string_to_args(" \"g++\" -c -I\"Foobar\"" );
+ char const expected[] = "-c -I\"Foobar\"";
+
+ assert( !strcmp( argv[ 0 ], "g++" ) );
+ assert( !strcmp( argv[ 1 ], expected ) );
+ free_argv( argv );
+ }
+#endif
+}
+
+
+/*
+ * exec_cmd() - launch an async command execution.
+ */
+
+void exec_cmd
+(
+ char * command,
+ void (* func)( void * closure, int status, timing_info *, char * invoked_command, char * command_output ),
+ void * closure,
+ LIST * shell,
+ char * action,
+ char * target
+)
+{
+ int slot;
+ int raw_cmd = 0 ;
+ char * argv_static[ MAXARGC + 1 ]; /* +1 for NULL */
+ char * * argv = argv_static;
+ char * p;
+ char * command_orig = command;
+
+ /* Check to see if we need to hack around the line-length limitation. Look
+ * for a JAMSHELL setting of "%", indicating that the command should be
+ * invoked directly.
+ */
+ if ( shell && !strcmp( shell->string, "%" ) && !list_next( shell ) )
+ {
+ raw_cmd = 1;
+ shell = 0;
+ }
+
+ /* Find a slot in the running commands table for this one. */
+ for ( slot = 0; slot < MAXJOBS; ++slot )
+ if ( !cmdtab[ slot ].pi.hProcess )
+ break;
+ if ( slot == MAXJOBS )
+ {
+ printf( "no slots for child!\n" );
+ exit( EXITBAD );
+ }
+
+ /* Compute the name of a temp batch file, for possible use. */
+ if ( !cmdtab[ slot ].tempfile_bat )
+ {
+ char const * tempdir = path_tmpdir();
+ DWORD procID = GetCurrentProcessId();
+
+ /* SVA - allocate 64 bytes extra just to be safe. */
+ cmdtab[ slot ].tempfile_bat = BJAM_MALLOC_ATOMIC( strlen( tempdir ) + 64 );
+
+ sprintf( cmdtab[ slot ].tempfile_bat, "%s\\jam%d-%02d.bat",
+ tempdir, procID, slot );
+ }
+
+    /* Trim leading (but not trailing) white space. */
+ while ( *( command + 1 ) && isspace( *command ) )
+ ++command;
+
+ /* Write to .BAT file unless the line would be too long and it meets the
+ * other spawnability criteria.
+ */
+ if ( raw_cmd && ( can_spawn( command ) >= MAXLINE ) )
+ {
+ if ( DEBUG_EXECCMD )
+ printf("Executing raw command directly\n");
+ }
+ else
+ {
+ FILE * f = 0;
+ int tries = 0;
+ raw_cmd = 0;
+
+ /* Write command to bat file. For some reason this open can fail
+         * intermittently. But doing some retries works. Most likely this is due
+ * to a previously existing file of the same name that happens to be
+ * opened by an active virus scanner. Pointed out and fixed by Bronek
+ * Kozicki.
+ */
+ for ( ; !f && ( tries < 4 ); ++tries )
+ {
+ f = fopen( cmdtab[ slot ].tempfile_bat, "w" );
+ if ( !f && ( tries < 4 ) ) Sleep( 250 );
+ }
+ if ( !f )
+ {
+ printf( "failed to write command file!\n" );
+ exit( EXITBAD );
+ }
+ fputs( command, f );
+ fclose( f );
+
+ command = cmdtab[ slot ].tempfile_bat;
+
+ if ( DEBUG_EXECCMD )
+ {
+ if ( shell )
+ printf( "using user-specified shell: %s", shell->string );
+ else
+ printf( "Executing through .bat file\n" );
+ }
+ }
+
+    /* Formulate argv. If shell was defined, be prepared for % and ! subs.
+ * Otherwise, use stock cmd.exe.
+ */
+ if ( shell )
+ {
+ int i;
+ char jobno[ 4 ];
+ int gotpercent = 0;
+
+ sprintf( jobno, "%d", slot + 1 );
+
+ for ( i = 0; shell && ( i < MAXARGC ); ++i, shell = list_next( shell ) )
+ {
+ switch ( shell->string[ 0 ] )
+ {
+ case '%': argv[ i ] = command; ++gotpercent; break;
+ case '!': argv[ i ] = jobno; break;
+ default : argv[ i ] = shell->string;
+ }
+ if ( DEBUG_EXECCMD )
+ printf( "argv[%d] = '%s'\n", i, argv[ i ] );
+ }
+
+ if ( !gotpercent )
+ argv[ i++ ] = command;
+
+ argv[ i ] = 0;
+ }
+ else if ( raw_cmd )
+ {
+ argv = string_to_args( command );
+ }
+ else
+ {
+ argv[ 0 ] = "cmd.exe";
+ argv[ 1 ] = "/Q/C"; /* anything more is non-portable */
+ argv[ 2 ] = command;
+ argv[ 3 ] = 0;
+ }
+
+ /* Catch interrupts whenever commands are running. */
+ if ( !cmdsrunning++ )
+ istat = signal( SIGINT, onintr );
+
+ /* Start the command. */
+ {
+ SECURITY_ATTRIBUTES sa
+ = { sizeof( SECURITY_ATTRIBUTES ), 0, 0 };
+ SECURITY_DESCRIPTOR sd;
+ STARTUPINFO si
+ = { sizeof( STARTUPINFO ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
+ string cmd;
+
+ /* Init the security data. */
+ InitializeSecurityDescriptor( &sd, SECURITY_DESCRIPTOR_REVISION );
+ SetSecurityDescriptorDacl( &sd, TRUE, NULL, FALSE );
+ sa.lpSecurityDescriptor = &sd;
+ sa.bInheritHandle = TRUE;
+
+ /* Create the stdout, which is also the merged out + err, pipe. */
+ if ( !CreatePipe( &cmdtab[ slot ].pipe_out[ 0 ],
+ &cmdtab[ slot ].pipe_out[ 1 ], &sa, 0 ) )
+ {
+ perror( "CreatePipe" );
+ exit( EXITBAD );
+ }
+
+        /* Create the separate stderr pipe used when error output is not merged. */
+ if ( globs.pipe_action == 2 )
+ {
+ if ( !CreatePipe( &cmdtab[ slot ].pipe_err[ 0 ],
+ &cmdtab[ slot ].pipe_err[ 1 ], &sa, 0 ) )
+ {
+ perror( "CreatePipe" );
+ exit( EXITBAD );
+ }
+ }
+
+ /* Set handle inheritance off for the pipe ends the parent reads from. */
+ SetHandleInformation( cmdtab[ slot ].pipe_out[ 0 ], HANDLE_FLAG_INHERIT, 0 );
+ if ( globs.pipe_action == 2 )
+ SetHandleInformation( cmdtab[ slot ].pipe_err[ 0 ], HANDLE_FLAG_INHERIT, 0 );
+
+ /* Hide the child window, if any. */
+ si.dwFlags |= STARTF_USESHOWWINDOW;
+ si.wShowWindow = SW_HIDE;
+
+ /* Set the child outputs to the pipes. */
+ si.dwFlags |= STARTF_USESTDHANDLES;
+ si.hStdOutput = cmdtab[ slot ].pipe_out[ 1 ];
+ if ( globs.pipe_action == 2 )
+ {
+ /* Pipe stderr to the action error output. */
+ si.hStdError = cmdtab[ slot ].pipe_err[ 1 ];
+ }
+ else if ( globs.pipe_action == 1 )
+ {
+ /* Pipe stderr to the console error output. */
+ si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
+ }
+ else
+ {
+ /* Pipe stderr to the action merged output. */
+ si.hStdError = cmdtab[ slot ].pipe_out[ 1 ];
+ }
+
+ /* Let the child inherit stdin, as some commands assume it's available. */
+ si.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
+
+ /* Save the operation for exec_wait() to find. */
+ cmdtab[ slot ].func = func;
+ cmdtab[ slot ].closure = closure;
+ if ( action && target )
+ {
+ string_copy( &cmdtab[ slot ].action, action );
+ string_copy( &cmdtab[ slot ].target, target );
+ }
+ else
+ {
+ string_free( &cmdtab[ slot ].action );
+ string_new ( &cmdtab[ slot ].action );
+ string_free( &cmdtab[ slot ].target );
+ string_new ( &cmdtab[ slot ].target );
+ }
+ string_copy( &cmdtab[ slot ].command, command_orig );
+
+ /* Put together the command we run. */
+ {
+ char * * argp = argv;
+ string_new( &cmd );
+ string_copy( &cmd, *(argp++) );
+ while ( *argp )
+ {
+ string_push_back( &cmd, ' ' );
+ string_append( &cmd, *(argp++) );
+ }
+ }
+
+ /* Create output buffers. */
+ string_new( &cmdtab[ slot ].buffer_out );
+ string_new( &cmdtab[ slot ].buffer_err );
+
+ /* Run the command by creating a sub-process for it. */
+ if (
+ ! CreateProcess(
+ NULL , /* application name */
+ cmd.value , /* command line */
+ NULL , /* process attributes */
+ NULL , /* thread attributes */
+ TRUE , /* inherit handles */
+ CREATE_NEW_PROCESS_GROUP, /* create flags */
+ NULL , /* env vars, null inherits env */
+ NULL , /* current dir, null is our */
+ /* current dir */
+ &si , /* startup info */
+ &cmdtab[ slot ].pi /* child process info, if created */
+ )
+ )
+ {
+ perror( "CreateProcess" );
+ exit( EXITBAD );
+ }
+
+ /* Clean up temporary stuff. */
+ string_free( &cmd );
+ }
+
+ /* Wait until we are under the limit of concurrent commands. Do not trust
+ * globs.jobs alone.
+ */
+ while ( ( cmdsrunning >= MAXJOBS ) || ( cmdsrunning >= globs.jobs ) )
+ if ( !exec_wait() )
+ break;
+
+ if ( argv != argv_static )
+ free_argv( argv );
+}
+
+
+/*
+ * exec_wait()
+ * * wait and drive at most one execution completion.
+ * * waits for one command to complete, while processing the i/o for all
+ * ongoing commands.
+ *
+ * Returns 0 if called when there were no more commands being executed or 1
+ * otherwise.
+ */
+
+int exec_wait()
+{
+ int i = -1;
+
+ /* Handle naive make1() which does not know if cmds are running. */
+ if ( !cmdsrunning )
+ return 0;
+
+ /* Wait for a command to complete, while snarfing up any output. */
+ do
+ {
+ /* Check for a complete command, briefly. */
+ i = try_wait(500);
+ /* Read in the output of all running commands. */
+ read_output();
+ /* Close out pending debug style dialogs. */
+ close_alerts();
+ /* Check if a command ran out of time. */
+ if ( i < 0 ) i = try_kill_one();
+ }
+ while ( i < 0 );
+
+ /* We have a command... process it. */
+ --cmdsrunning;
+ {
+ timing_info time;
+ int rstat;
+
+ /* The time data for the command. */
+ record_times( cmdtab[ i ].pi.hProcess, &time );
+
+ /* Clear the temp file. */
+ if ( cmdtab[ i ].tempfile_bat )
+ {
+ unlink( cmdtab[ i ].tempfile_bat );
+ BJAM_FREE( cmdtab[ i ].tempfile_bat );
+ cmdtab[ i ].tempfile_bat = NULL;
+ }
+
+ /* Find out the process exit code. */
+ GetExitCodeProcess( cmdtab[ i ].pi.hProcess, &cmdtab[ i ].exit_code );
+
+        /* The disposition of the command. */
+ if ( intr )
+ rstat = EXEC_CMD_INTR;
+ else if ( cmdtab[ i ].exit_code != 0 )
+ rstat = EXEC_CMD_FAIL;
+ else
+ rstat = EXEC_CMD_OK;
+
+ /* Output the action block. */
+ out_action(
+ cmdtab[ i ].action.size > 0 ? cmdtab[ i ].action.value : 0,
+ cmdtab[ i ].target.size > 0 ? cmdtab[ i ].target.value : 0,
+ cmdtab[ i ].command.size > 0 ? cmdtab[ i ].command.value : 0,
+ cmdtab[ i ].buffer_out.size > 0 ? cmdtab[ i ].buffer_out.value : 0,
+ cmdtab[ i ].buffer_err.size > 0 ? cmdtab[ i ].buffer_err.value : 0,
+ cmdtab[ i ].exit_reason );
+
+ /* Call the callback, may call back to jam rule land. Assume -p0 in
+ * effect so only pass buffer containing merged output.
+ */
+ (*cmdtab[ i ].func)(
+ cmdtab[ i ].closure,
+ rstat,
+ &time,
+ cmdtab[ i ].command.value,
+ cmdtab[ i ].buffer_out.value );
+
+ /* Clean up the command data, process, etc. */
+ string_free( &cmdtab[ i ].action ); string_new( &cmdtab[ i ].action );
+ string_free( &cmdtab[ i ].target ); string_new( &cmdtab[ i ].target );
+ string_free( &cmdtab[ i ].command ); string_new( &cmdtab[ i ].command );
+ if ( cmdtab[ i ].pi.hProcess ) { CloseHandle( cmdtab[ i ].pi.hProcess ); cmdtab[ i ].pi.hProcess = 0; }
+ if ( cmdtab[ i ].pi.hThread ) { CloseHandle( cmdtab[ i ].pi.hThread ); cmdtab[ i ].pi.hThread = 0; }
+ if ( cmdtab[ i ].pipe_out[ 0 ] ) { CloseHandle( cmdtab[ i ].pipe_out[ 0 ] ); cmdtab[ i ].pipe_out[ 0 ] = 0; }
+ if ( cmdtab[ i ].pipe_out[ 1 ] ) { CloseHandle( cmdtab[ i ].pipe_out[ 1 ] ); cmdtab[ i ].pipe_out[ 1 ] = 0; }
+ if ( cmdtab[ i ].pipe_err[ 0 ] ) { CloseHandle( cmdtab[ i ].pipe_err[ 0 ] ); cmdtab[ i ].pipe_err[ 0 ] = 0; }
+ if ( cmdtab[ i ].pipe_err[ 1 ] ) { CloseHandle( cmdtab[ i ].pipe_err[ 1 ] ); cmdtab[ i ].pipe_err[ 1 ] = 0; }
+ string_free( &cmdtab[ i ].buffer_out ); string_new( &cmdtab[ i ].buffer_out );
+ string_free( &cmdtab[ i ].buffer_err ); string_new( &cmdtab[ i ].buffer_err );
+ cmdtab[ i ].exit_code = 0;
+ cmdtab[ i ].exit_reason = EXIT_OK;
+ }
+
+ return 1;
+}
+
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+static void free_argv( char * * args )
+{
+ BJAM_FREE( args[ 0 ] );
+ BJAM_FREE( args );
+}
+
+
+/*
+ * For more details on Windows cmd.exe shell command-line length limitations see
+ * the following MSDN article:
+ * http://support.microsoft.com/default.aspx?scid=kb;en-us;830473
+ */
+
+int maxline()
+{
+ OSVERSIONINFO os_info;
+ os_info.dwOSVersionInfoSize = sizeof( os_info );
+ GetVersionEx( &os_info );
+
+ if ( os_info.dwMajorVersion >= 5 ) return 8191; /* XP > */
+ if ( os_info.dwMajorVersion == 4 ) return 2047; /* NT 4.x */
+ return 996; /* NT 3.5.1 */
+}
+
+
+/*
+ * Convert a command string into arguments for spawnvp(). The original code,
+ * inherited from ftjam, tried to break up every argument on the command-line,
+ * dealing with quotes, but that is really a waste of time on Win32, at least.
+ * It turns out that all you need to do is get the raw path to the executable in
+ * the first argument to spawnvp(), and you can pass all the rest of the
+ * command-line arguments to spawnvp() in one, un-processed string.
+ *
+ * New strategy: break the string in at most one place.
+ */
+
+static char * * string_to_args( char const * string )
+{
+ int src_len;
+ int in_quote;
+ char * line;
+ char const * src;
+ char * dst;
+ char * * argv;
+
+ /* Drop leading and trailing whitespace if any. */
+ while ( isspace( *string ) )
+ ++string;
+
+ src_len = strlen( string );
+ while ( ( src_len > 0 ) && isspace( string[ src_len - 1 ] ) )
+ --src_len;
+
+ /* Copy the input string into a buffer we can modify. */
+ line = (char *)BJAM_MALLOC_ATOMIC( src_len + 1 );
+ if ( !line )
+ return 0;
+
+ /* Allocate the argv array.
+ * element 0: stores the path to the executable
+ * element 1: stores the command-line arguments to the executable
+ * element 2: NULL terminator
+ */
+ argv = (char * *)BJAM_MALLOC( 3 * sizeof( char * ) );
+ if ( !argv )
+ {
+ BJAM_FREE( line );
+ return 0;
+ }
+
+ /* Strip quotes from the first command-line argument and find where it ends.
+ * Quotes are illegal in Win32 pathnames, so we do not need to worry about
+ * preserving escaped quotes here. Spaces can not be escaped in Win32, only
+ * enclosed in quotes, so removing backslash escapes is also a non-issue.
+ */
+ in_quote = 0;
+ for ( src = string, dst = line ; *src; ++src )
+ {
+ if ( *src == '"' )
+ in_quote = !in_quote;
+ else if ( !in_quote && isspace( *src ) )
+ break;
+ else
+ *dst++ = *src;
+ }
+ *dst++ = 0;
+ argv[ 0 ] = line;
+
+ /* Skip whitespace in src. */
+ while ( isspace( *src ) )
+ ++src;
+
+ argv[ 1 ] = dst;
+
+ /* Copy the rest of the arguments verbatim. */
+ src_len -= src - string;
+
+ /* Use strncat() because it appends a trailing nul. */
+ *dst = 0;
+ strncat( dst, src, src_len );
+
+ argv[ 2 ] = 0;
+
+ return argv;
+}
+
+
+static void onintr( int disp )
+{
+ ++intr;
+ printf( "...interrupted\n" );
+}
+
+
+/*
+ * can_spawn() - If the command is suitable for execution via spawnvp(), return
+ * a number >= the number of characters it would occupy on the command-line.
+ * Otherwise, return zero.
+ */
+
+long can_spawn( char * command )
+{
+ char * p;
+ char inquote = 0;
+
+ /* Move to the first non-whitespace. */
+ command += strspn( command, " \t" );
+
+ p = command;
+
+ /* Look for newlines and unquoted i/o redirection. */
+ do
+ {
+ p += strcspn( p, "'\n\"<>|" );
+
+ switch ( *p )
+ {
+ case '\n':
+ /* Skip over any following spaces. */
+ while ( isspace( *p ) )
+ ++p;
+ /* Must use a .bat file if there is anything significant following
+ * the newline.
+ */
+ if ( *p )
+ return 0;
+ break;
+
+ case '"':
+ case '\'':
+ if ( ( p > command ) && ( p[ -1 ] != '\\' ) )
+ {
+ if ( inquote == *p )
+ inquote = 0;
+ else if ( inquote == 0 )
+ inquote = *p;
+ }
+ ++p;
+ break;
+
+ case '<':
+ case '>':
+ case '|':
+ if ( !inquote )
+ return 0;
+ ++p;
+ break;
+ }
+ }
+ while ( *p );
+
+ /* Return the number of characters the command will occupy. */
+ return p - command;
+}
+
+
+/* 64-bit arithmetic helpers. */
+
+/* Compute the carry bit from the addition of two 32-bit unsigned numbers. */
+#define add_carry_bit( a, b ) ( (((a) | (b)) >> 31) & (~((a) + (b)) >> 31) & 0x1 )
+
+/* Compute the high 32 bits of the addition of two 64-bit unsigned numbers, h1l1 and h2l2. */
+#define add_64_hi( h1, l1, h2, l2 ) ((h1) + (h2) + add_carry_bit(l1, l2))
+
+
+/*
+ * Add two 64-bit unsigned numbers, h1l1 and h2l2.
+ */
+
+static FILETIME add_64
+(
+ unsigned long h1, unsigned long l1,
+ unsigned long h2, unsigned long l2
+)
+{
+ FILETIME result;
+ result.dwLowDateTime = l1 + l2;
+ result.dwHighDateTime = add_64_hi( h1, l1, h2, l2 );
+ return result;
+}
+
+
+static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 )
+{
+ return add_64( t1.dwHighDateTime, t1.dwLowDateTime, t2.dwHighDateTime,
+ t2.dwLowDateTime );
+}
+
+
+static FILETIME negate_FILETIME( FILETIME t )
+{
+ /* 2s complement negation */
+ return add_64( ~t.dwHighDateTime, ~t.dwLowDateTime, 0, 1 );
+}
+
+
+/*
+ * Convert a FILETIME to a number of seconds.
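+ * A FILETIME counts 100-nanosecond intervals, so each unit of the low word
+ * is worth 1.0e-7 seconds and each unit of the high word 2^32 * 1.0e-7 seconds.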
+ */
+
+static double filetime_seconds( FILETIME t )
+{
+ return t.dwHighDateTime * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) + t.dwLowDateTime * 1.0e-7;
+}
+
+
+/*
+ * What should be a simple conversion, turns out to be horribly complicated by
+ * the deficiencies of MSVC and the Win32 API.
+ */
+
+static time_t filetime_dt( FILETIME t_utc )
+{
+ static int calc_time_diff = 1;
+ static double time_diff;
+ if ( calc_time_diff )
+ {
+ struct tm t0_;
+ FILETIME f0_local;
+ FILETIME f0_;
+ SYSTEMTIME s0_;
+ GetSystemTime( &s0_ );
+ t0_.tm_year = s0_.wYear-1900;
+ t0_.tm_mon = s0_.wMonth-1;
+ t0_.tm_wday = s0_.wDayOfWeek;
+ t0_.tm_mday = s0_.wDay;
+ t0_.tm_hour = s0_.wHour;
+ t0_.tm_min = s0_.wMinute;
+ t0_.tm_sec = s0_.wSecond;
+ t0_.tm_isdst = 0;
+ SystemTimeToFileTime( &s0_, &f0_local );
+ LocalFileTimeToFileTime( &f0_local, &f0_ );
+ time_diff = filetime_seconds( f0_ ) - (double)mktime( &t0_ );
+ calc_time_diff = 0;
+ }
+ return ceil( filetime_seconds( t_utc ) - time_diff );
+}
+
+
+static void record_times( HANDLE process, timing_info * time )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
+ {
+ time->system = filetime_seconds( kernel );
+ time->user = filetime_seconds( user );
+ time->start = filetime_dt ( creation );
+ time->end = filetime_dt ( exit );
+ }
+}
+
+
+#define IO_BUFFER_SIZE ( 16 * 1024 )
+
+static char ioBuffer[ IO_BUFFER_SIZE + 1 ];
+
+
+static void read_pipe
+(
+ HANDLE in, /* the pipe to read from */
+ string * out
+)
+{
+ DWORD bytesInBuffer = 0;
+ DWORD bytesAvailable = 0;
+
+ do
+ {
+ /* check if we have any data to read */
+ if ( !PeekNamedPipe( in, ioBuffer, IO_BUFFER_SIZE, &bytesInBuffer, &bytesAvailable, NULL ) )
+ bytesAvailable = 0;
+
+ /* read in the available data */
+ if ( bytesAvailable > 0 )
+ {
+ /* we only read in the available bytes, to avoid blocking */
+ if ( ReadFile( in, ioBuffer,
+ bytesAvailable <= IO_BUFFER_SIZE ? bytesAvailable : IO_BUFFER_SIZE,
+ &bytesInBuffer, NULL ) )
+ {
+ if ( bytesInBuffer > 0 )
+ {
+ /* Clean up some illegal chars. */
+ int i;
+ for ( i = 0; i < bytesInBuffer; ++i )
+ {
+ if ( ( (unsigned char)ioBuffer[ i ] < 1 ) )
+ ioBuffer[ i ] = '?';
+ }
+                    /* Null-terminate. */
+ ioBuffer[ bytesInBuffer ] = '\0';
+ /* Append to the output. */
+ string_append( out, ioBuffer );
+ /* Subtract what we read in. */
+ bytesAvailable -= bytesInBuffer;
+ }
+ else
+ {
+                    /* Likely a read error, bail out. */
+ bytesAvailable = 0;
+ }
+ }
+ else
+ {
+                /* Definitely a read error, bail out. */
+ bytesAvailable = 0;
+ }
+ }
+ }
+ while ( bytesAvailable > 0 );
+}
+
+
+static void read_output()
+{
+ int i;
+ for ( i = 0; i < globs.jobs && i < MAXJOBS; ++i )
+ {
+ /* Read stdout data. */
+ if ( cmdtab[ i ].pipe_out[ 0 ] )
+ read_pipe( cmdtab[ i ].pipe_out[ 0 ], & cmdtab[ i ].buffer_out );
+ /* Read stderr data. */
+ if ( cmdtab[ i ].pipe_err[ 0 ] )
+ read_pipe( cmdtab[ i ].pipe_err[ 0 ], & cmdtab[ i ].buffer_err );
+ }
+}
+
+
+/*
+ * Waits for a single child process command to complete, or the timeout,
+ * whichever comes first. Returns the index of the completed command in the
+ * cmdtab array, or -1.
+ */
+
+static int try_wait( int timeoutMillis )
+{
+ int i;
+ int num_active;
+ int wait_api_result;
+ HANDLE active_handles[ MAXJOBS ];
+ int active_procs[ MAXJOBS ];
+
+ /* Prepare a list of all active processes to wait for. */
+ for ( num_active = 0, i = 0; i < globs.jobs; ++i )
+ {
+ if ( cmdtab[ i ].pi.hProcess )
+ {
+ active_handles[ num_active ] = cmdtab[ i ].pi.hProcess;
+ active_procs[ num_active ] = i;
+ ++num_active;
+ }
+ }
+
+ /* Wait for a child to complete, or for our timeout window to expire. */
+ wait_api_result = WaitForMultipleObjects( num_active, active_handles,
+ FALSE, timeoutMillis );
+ if ( ( WAIT_OBJECT_0 <= wait_api_result ) &&
+ ( wait_api_result < WAIT_OBJECT_0 + num_active ) )
+ {
+        /* Terminated process detected - return its index. */
+ return active_procs[ wait_api_result - WAIT_OBJECT_0 ];
+ }
+
+ /* Timeout. */
+ return -1;
+}
+
+
+static int try_kill_one()
+{
+ /* Only need to check if a timeout was specified with the -l option. */
+ if ( globs.timeout > 0 )
+ {
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ double t = running_time( cmdtab[ i ].pi.hProcess );
+ if ( t > (double)globs.timeout )
+ {
+ /* The job may have left an alert dialog around, try and get rid
+ * of it before killing
+ */
+ close_alert( cmdtab[ i ].pi.hProcess );
+ /* We have a "runaway" job, kill it. */
+ kill_process_tree( 0, cmdtab[ i ].pi.hProcess );
+ /* And return it marked as a timeout. */
+ cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
+ return i;
+ }
+ }
+ }
+ return -1;
+}
+
+
+static void close_alerts()
+{
+ /* We only attempt this every 5 seconds, or so, because it is not a cheap
+ * operation, and we will catch the alerts eventually. This check uses
+ * floats as some compilers define CLOCKS_PER_SEC as a float or double.
+ */
+ if ( ( (float)clock() / (float)( CLOCKS_PER_SEC * 5 ) ) < ( 1.0 / 5.0 ) )
+ {
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ close_alert( cmdtab[ i ].pi.hProcess );
+ }
+}
+
+
+/*
+ * Calc the current running time of an *active* process.
+ */
+
+static double running_time( HANDLE process )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ FILETIME current;
+ if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
+ {
+ /* Compute the elapsed time. */
+ GetSystemTimeAsFileTime( &current );
+ return filetime_seconds( add_FILETIME( current,
+ negate_FILETIME( creation ) ) );
+ }
+ return 0.0;
+}
+
+
+/* It is just stupidly silly that one has to do this. */
+typedef struct PROCESS_BASIC_INFORMATION__
+{
+ LONG ExitStatus;
+ PVOID PebBaseAddress;
+ ULONG AffinityMask;
+ LONG BasePriority;
+ ULONG UniqueProcessId;
+ ULONG InheritedFromUniqueProcessId;
+} PROCESS_BASIC_INFORMATION_;
+typedef LONG (__stdcall * NtQueryInformationProcess__)(
+ HANDLE ProcessHandle,
+ LONG ProcessInformationClass,
+ PVOID ProcessInformation,
+ ULONG ProcessInformationLength,
+ PULONG ReturnLength);
+static NtQueryInformationProcess__ NtQueryInformationProcess_ = NULL;
+static HMODULE NTDLL_ = NULL;
+DWORD get_process_id( HANDLE process )
+{
+ PROCESS_BASIC_INFORMATION_ pinfo;
+ if ( !NtQueryInformationProcess_ )
+ {
+ if ( ! NTDLL_ )
+ NTDLL_ = GetModuleHandleA( "ntdll" );
+ if ( NTDLL_ )
+ NtQueryInformationProcess_
+ = (NtQueryInformationProcess__)GetProcAddress( NTDLL_, "NtQueryInformationProcess" );
+ }
+ if ( NtQueryInformationProcess_ )
+ {
+ LONG r = (*NtQueryInformationProcess_)( process,
+ /* ProcessBasicInformation == */ 0, &pinfo,
+ sizeof( PROCESS_BASIC_INFORMATION_ ), NULL );
+ return pinfo.UniqueProcessId;
+ }
+ return 0;
+}
+
+
+/*
+ * Not really optimal, or efficient, but it is easier this way, and it is not
+ * like we are going to be killing thousands, or even tens of processes.
+ */
+
+static void kill_process_tree( DWORD pid, HANDLE process )
+{
+ HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
+ if ( !pid )
+ pid = get_process_id( process );
+ process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
+
+ if ( INVALID_HANDLE_VALUE != process_snapshot_h )
+ {
+ BOOL ok = TRUE;
+ PROCESSENTRY32 pinfo;
+ pinfo.dwSize = sizeof( PROCESSENTRY32 );
+ for (
+ ok = Process32First( process_snapshot_h, &pinfo );
+ ok == TRUE;
+ ok = Process32Next( process_snapshot_h, &pinfo ) )
+ {
+ if ( pinfo.th32ParentProcessID == pid )
+ {
+ /* Found a child, recurse to kill it and anything else below it.
+ */
+ HANDLE ph = OpenProcess( PROCESS_ALL_ACCESS, FALSE,
+ pinfo.th32ProcessID );
+ if ( NULL != ph )
+ {
+ kill_process_tree( pinfo.th32ProcessID, ph );
+ CloseHandle( ph );
+ }
+ }
+ }
+ CloseHandle( process_snapshot_h );
+ }
+ /* Now that the children are all dead, kill the root. */
+ TerminateProcess( process, -2 );
+}
+
+
+static double creation_time( HANDLE process )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ FILETIME current;
+ return GetProcessTimes( process, &creation, &exit, &kernel, &user )
+ ? filetime_seconds( creation )
+ : 0.0;
+}
+
+
+/*
+ * Recursive check if first process is parent (directly or indirectly) of the
+ * second one. Both processes are passed as process ids, not handles. Special
+ * return value 2 means that the second process is smss.exe and its parent
+ * process is System (first argument is ignored).
+ */
+
+static int is_parent_child( DWORD parent, DWORD child )
+{
+ HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
+
+ if ( !child )
+ return 0;
+ if ( parent == child )
+ return 1;
+
+ process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
+ if ( INVALID_HANDLE_VALUE != process_snapshot_h )
+ {
+ BOOL ok = TRUE;
+ PROCESSENTRY32 pinfo;
+ pinfo.dwSize = sizeof( PROCESSENTRY32 );
+ for (
+ ok = Process32First( process_snapshot_h, &pinfo );
+ ok == TRUE;
+ ok = Process32Next( process_snapshot_h, &pinfo ) )
+ {
+ if ( pinfo.th32ProcessID == child )
+ {
+ /* Unfortunately, process ids are not really unique. There might
+                 * be a spurious "parent and child" relationship match between two
+ * non-related processes if real parent process of a given
+ * process has exited (while child process kept running as an
+ * "orphan") and the process id of such parent process has been
+ * reused by internals of the operating system when creating
+ * another process.
+ *
+                 * Thus an additional check is needed - process creation time. This
+ * check may fail (i.e. return 0) for system processes due to
+ * insufficient privileges, and that is OK.
+ */
+ double tchild = 0.0;
+ double tparent = 0.0;
+ HANDLE hchild = OpenProcess( PROCESS_QUERY_INFORMATION, FALSE, pinfo.th32ProcessID );
+ CloseHandle( process_snapshot_h );
+
+ /* csrss.exe may display message box like following:
+ * xyz.exe - Unable To Locate Component
+ * This application has failed to start because
+ * boost_foo-bar.dll was not found. Re-installing the
+ * application may fix the problem
+ * This actually happens when starting test process that depends
+ * on a dynamic library which failed to build. We want to
+ * automatically close these message boxes even though csrss.exe
+ * is not our child process. We may depend on the fact that (in
+ * all current versions of Windows) csrss.exe is directly child
+ * of the smss.exe process, which in turn is directly child of
+ * the System process, which always has process id == 4. This
+ * check must be performed before comparison of process creation
+ * times.
+ */
+ if ( !stricmp( pinfo.szExeFile, "csrss.exe" ) &&
+ ( is_parent_child( parent, pinfo.th32ParentProcessID ) == 2 ) )
+ return 1;
+ if ( !stricmp( pinfo.szExeFile, "smss.exe" ) &&
+ ( pinfo.th32ParentProcessID == 4 ) )
+ return 2;
+
+ if ( hchild )
+ {
+ HANDLE hparent = OpenProcess( PROCESS_QUERY_INFORMATION,
+ FALSE, pinfo.th32ParentProcessID );
+ if ( hparent )
+ {
+ tchild = creation_time( hchild );
+ tparent = creation_time( hparent );
+ CloseHandle( hparent );
+ }
+ CloseHandle( hchild );
+ }
+
+ /* Return 0 if one of the following is true:
+ * 1. we failed to read process creation time
+ * 2. child was created before alleged parent
+ */
+ if ( ( tchild == 0.0 ) || ( tparent == 0.0 ) ||
+ ( tchild < tparent ) )
+ return 0;
+
+ return is_parent_child( parent, pinfo.th32ParentProcessID ) & 1;
+ }
+ }
+
+ CloseHandle( process_snapshot_h );
+ }
+
+ return 0;
+}
+
+typedef struct PROCESS_HANDLE_ID { HANDLE h; DWORD pid; } PROCESS_HANDLE_ID;
+
+
+/*
+ * This function is called by the operating system for each topmost window.
+ */
+
+BOOL CALLBACK close_alert_window_enum( HWND hwnd, LPARAM lParam )
+{
+ char buf[ 7 ] = { 0 };
+ PROCESS_HANDLE_ID p = *( (PROCESS_HANDLE_ID *)lParam );
+ DWORD pid = 0;
+ DWORD tid = 0;
+
+ /* We want to find and close any window that:
+ * 1. is visible and
+ * 2. is a dialog and
+ * 3. is displayed by any of our child processes
+ */
+ if ( !IsWindowVisible( hwnd ) )
+ return TRUE;
+
+ if ( !GetClassNameA( hwnd, buf, sizeof( buf ) ) )
+ return TRUE; /* Failed to read class name; presume it is not a dialog. */
+
+ if ( strcmp( buf, "#32770" ) )
+ return TRUE; /* Not a dialog */
+
+ /* GetWindowThreadProcessId() returns 0 on error, otherwise thread id of
+ * window message pump thread.
+ */
+ tid = GetWindowThreadProcessId( hwnd, &pid );
+
+ if ( tid && is_parent_child( p.pid, pid ) )
+ {
+ /* Ask really nice. */
+ PostMessageA( hwnd, WM_CLOSE, 0, 0 );
+ /* Now wait and see if it worked. If not, insist. */
+ if ( WaitForSingleObject( p.h, 200 ) == WAIT_TIMEOUT )
+ {
+ PostThreadMessageA( tid, WM_QUIT, 0, 0 );
+ WaitForSingleObject( p.h, 300 );
+ }
+
+ /* Done, we do not want to check any other window now. */
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+
+static void close_alert( HANDLE process )
+{
+ DWORD pid = get_process_id( process );
+ /* If process already exited or we just can not get its process id, do not
+ * go any further.
+ */
+ if ( pid )
+ {
+ PROCESS_HANDLE_ID p;
+ p.h = process;
+ p.pid = pid;
+ EnumWindows( &close_alert_window_enum, (LPARAM)&p );
+ }
+}
+
+#endif /* USE_EXECNT */
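execnt.c performs its FILETIME arithmetic on the two 32-bit halves
(dwLowDateTime/dwHighDateTime) rather than on a native 64-bit integer,
carrying between the halves by hand. A standalone sketch of the same idea,
using a plain overflow test for the carry instead of the add_carry_bit() bit
trick; the split64 type is hypothetical and not part of the diff:

    #include <stdio.h>
    #include <stdint.h>

    /* Stand-in for FILETIME: a 64-bit count split into two 32-bit halves. */
    typedef struct { uint32_t lo; uint32_t hi; } split64;

    static split64 split_add( split64 a, split64 b )
    {
        split64 r;
        r.lo = a.lo + b.lo;                    /* wraps modulo 2^32 */
        r.hi = a.hi + b.hi + ( r.lo < a.lo );  /* carry if the low half wrapped */
        return r;
    }

    int main( void )
    {
        split64 a = { 0xFFFFFFF0u, 1u };   /* represents 0x1FFFFFFF0 */
        split64 b = { 0x00000020u, 0u };   /* represents 0x20 */
        split64 s = split_add( a, b );     /* expect 0x200000010 */
        printf( "hi=0x%X lo=0x%X\n", (unsigned)s.hi, (unsigned)s.lo );
        return 0;
    }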
diff --git a/jam-files/engine/execunix.c b/jam-files/engine/execunix.c
new file mode 100644
index 000000000..ef9dba003
--- /dev/null
+++ b/jam-files/engine/execunix.c
@@ -0,0 +1,569 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2007 Noel Belcourt.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "execcmd.h"
+#include "output.h"
+#include <errno.h>
+#include <signal.h>
+#include <stdio.h>
+#include <time.h>
+#include <unistd.h> /* needed for vfork(), _exit() prototypes */
+#include <sys/resource.h>
+#include <sys/times.h>
+#include <sys/wait.h>
+
+#if defined(sun) || defined(__sun) || defined(linux)
+ #include <wait.h>
+#endif
+
+#ifdef USE_EXECUNIX
+
+#include <sys/times.h>
+
+#if defined(__APPLE__)
+ #define NO_VFORK
+#endif
+
+#ifdef NO_VFORK
+ #define vfork() fork()
+#endif
+
+
+/*
+ * execunix.c - execute a shell script on UNIX/WinNT/OS2/AmigaOS
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp().
+ * The default is:
+ *
+ * /bin/sh -c % [ on UNIX/AmigaOS ]
+ * cmd.exe /c % [ on OS2/WinNT ]
+ *
+ * Each word must be an individual element in a jam variable value.
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to the slot
+ * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
+ * not include a %, it is tacked on as the last argument.
+ *
+ * Do not just set JAMSHELL to /bin/sh or cmd.exe - it will not work!
+ *
+ * External routines:
+ * exec_cmd() - launch an async command execution.
+ * exec_wait() - wait and drive at most one execution completion.
+ *
+ * Internal routines:
+ * onintr() - bump intr to note command interruption.
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 05/04/94 (seiwald) - async multiprocess interface
+ * 01/22/95 (seiwald) - $(JAMSHELL) support
+ * 06/02/97 (gsar) - full async multiprocess support for Win32
+ */
+
+static clock_t tps = 0;
+static struct timeval tv;
+static int select_timeout = 0;
+static int intr = 0;
+static int cmdsrunning = 0;
+static struct tms old_time;
+
+#define OUT 0
+#define ERR 1
+
+static struct
+{
+ int pid; /* on win32, a real process handle */
+ int fd[2]; /* file descriptors for stdout and stderr */
+ FILE *stream[2]; /* child's stdout (0) and stderr (1) file stream */
+ clock_t start_time; /* start time of child process */
+ int exit_reason; /* termination status */
+ int action_length; /* length of action string */
+ int target_length; /* length of target string */
+ char *action; /* buffer to hold action and target invoked */
+ char *target; /* buffer to hold action and target invoked */
+ char *command; /* buffer to hold command being invoked */
+ char *buffer[2]; /* buffer to hold stdout and stderr, if any */
+ void (*func)( void *closure, int status, timing_info*, char *, char * );
+ void *closure;
+ time_t start_dt; /* start of command timestamp */
+} cmdtab[ MAXJOBS ] = {{0}};
+
+/*
+ * onintr() - bump intr to note command interruption
+ */
+
+void onintr( int disp )
+{
+ ++intr;
+ printf( "...interrupted\n" );
+}
+
+
+/*
+ * exec_cmd() - launch an async command execution.
+ */
+
+void exec_cmd
+(
+ char * string,
+ void (*func)( void *closure, int status, timing_info*, char *, char * ),
+ void * closure,
+ LIST * shell,
+ char * action,
+ char * target
+)
+{
+ static int initialized = 0;
+ int out[2];
+ int err[2];
+ int slot;
+ int len;
+ char * argv[ MAXARGC + 1 ]; /* +1 for NULL */
+
+ /* Find a slot in the running commands table for this one. */
+ for ( slot = 0; slot < MAXJOBS; ++slot )
+ if ( !cmdtab[ slot ].pid )
+ break;
+
+ if ( slot == MAXJOBS )
+ {
+ printf( "no slots for child!\n" );
+ exit( EXITBAD );
+ }
+
+    /* Formulate argv. If shell was defined, be prepared for % and ! subs.
+ * Otherwise, use stock /bin/sh on unix or cmd.exe on NT.
+ */
+ if ( shell )
+ {
+ int i;
+ char jobno[4];
+ int gotpercent = 0;
+
+ sprintf( jobno, "%d", slot + 1 );
+
+ for ( i = 0; shell && i < MAXARGC; ++i, shell = list_next( shell ) )
+ {
+ switch ( shell->string[0] )
+ {
+ case '%': argv[ i ] = string; ++gotpercent; break;
+ case '!': argv[ i ] = jobno; break;
+ default : argv[ i ] = shell->string;
+ }
+ if ( DEBUG_EXECCMD )
+ printf( "argv[%d] = '%s'\n", i, argv[ i ] );
+ }
+
+ if ( !gotpercent )
+ argv[ i++ ] = string;
+
+ argv[ i ] = 0;
+ }
+ else
+ {
+ argv[ 0 ] = "/bin/sh";
+ argv[ 1 ] = "-c";
+ argv[ 2 ] = string;
+ argv[ 3 ] = 0;
+ }
+
+ /* Increment jobs running. */
+ ++cmdsrunning;
+
+ /* Save off actual command string. */
+ cmdtab[ slot ].command = BJAM_MALLOC_ATOMIC( strlen( string ) + 1 );
+ strcpy( cmdtab[ slot ].command, string );
+
+ /* Initialize only once. */
+ if ( !initialized )
+ {
+ times( &old_time );
+ initialized = 1;
+ }
+
+ /* Create pipes from child to parent. */
+ {
+ if ( pipe( out ) < 0 )
+ exit( EXITBAD );
+
+ if ( pipe( err ) < 0 )
+ exit( EXITBAD );
+ }
+
+ /* Start the command */
+
+ cmdtab[ slot ].start_dt = time(0);
+
+ if ( 0 < globs.timeout )
+ {
+ /*
+ * Handle hung processes by manually tracking elapsed time and signal
+ * process when time limit expires.
+ */
+ struct tms buf;
+ cmdtab[ slot ].start_time = times( &buf );
+
+ /* Make a global, only do this once. */
+ if ( tps == 0 ) tps = sysconf( _SC_CLK_TCK );
+ }
+
+ if ( ( cmdtab[ slot ].pid = vfork() ) == 0 )
+ {
+ int pid = getpid();
+
+ close( out[0] );
+ close( err[0] );
+
+ dup2( out[1], STDOUT_FILENO );
+
+ if ( globs.pipe_action == 0 )
+ dup2( out[1], STDERR_FILENO );
+ else
+ dup2( err[1], STDERR_FILENO );
+
+ close( out[1] );
+ close( err[1] );
+
+ /* Make this process a process group leader so that when we kill it, all
+ * child processes of this process are terminated as well. We use
+ * killpg(pid, SIGKILL) to kill the process group leader and all its
+ * children.
+ */
+ if ( 0 < globs.timeout )
+ {
+ struct rlimit r_limit;
+ r_limit.rlim_cur = globs.timeout;
+ r_limit.rlim_max = globs.timeout;
+ setrlimit( RLIMIT_CPU, &r_limit );
+ }
+ setpgid( pid,pid );
+ execvp( argv[0], argv );
+ perror( "execvp" );
+ _exit( 127 );
+ }
+ else if ( cmdtab[ slot ].pid == -1 )
+ {
+ perror( "vfork" );
+ exit( EXITBAD );
+ }
+
+ setpgid( cmdtab[ slot ].pid, cmdtab[ slot ].pid );
+
+ /* close write end of pipes */
+ close( out[1] );
+ close( err[1] );
+
+ /* set both file descriptors to non-blocking */
+ fcntl(out[0], F_SETFL, O_NONBLOCK);
+ fcntl(err[0], F_SETFL, O_NONBLOCK);
+
+ /* child writes stdout to out[1], parent reads from out[0] */
+ cmdtab[ slot ].fd[ OUT ] = out[0];
+ cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" );
+ if ( cmdtab[ slot ].stream[ OUT ] == NULL )
+ {
+ perror( "fdopen" );
+ exit( EXITBAD );
+ }
+
+ /* child writes stderr to err[1], parent reads from err[0] */
+ if (globs.pipe_action == 0)
+ {
+ close(err[0]);
+ }
+ else
+ {
+ cmdtab[ slot ].fd[ ERR ] = err[0];
+ cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" );
+ if ( cmdtab[ slot ].stream[ ERR ] == NULL )
+ {
+ perror( "fdopen" );
+ exit( EXITBAD );
+ }
+ }
+
+ /* Ensure enough room for rule and target name. */
+ if ( action && target )
+ {
+ len = strlen( action ) + 1;
+ if ( cmdtab[ slot ].action_length < len )
+ {
+ BJAM_FREE( cmdtab[ slot ].action );
+ cmdtab[ slot ].action = BJAM_MALLOC_ATOMIC( len );
+ cmdtab[ slot ].action_length = len;
+ }
+ strcpy( cmdtab[ slot ].action, action );
+ len = strlen( target ) + 1;
+ if ( cmdtab[ slot ].target_length < len )
+ {
+ BJAM_FREE( cmdtab[ slot ].target );
+ cmdtab[ slot ].target = BJAM_MALLOC_ATOMIC( len );
+ cmdtab[ slot ].target_length = len;
+ }
+ strcpy( cmdtab[ slot ].target, target );
+ }
+ else
+ {
+ BJAM_FREE( cmdtab[ slot ].action );
+ BJAM_FREE( cmdtab[ slot ].target );
+ cmdtab[ slot ].action = 0;
+ cmdtab[ slot ].target = 0;
+ cmdtab[ slot ].action_length = 0;
+ cmdtab[ slot ].target_length = 0;
+ }
+
+ /* Save the operation for exec_wait() to find. */
+ cmdtab[ slot ].func = func;
+ cmdtab[ slot ].closure = closure;
+
+ /* Wait until we are under the limit of concurrent commands. Do not trust
+ * globs.jobs alone.
+ */
+ while ( ( cmdsrunning >= MAXJOBS ) || ( cmdsrunning >= globs.jobs ) )
+ if ( !exec_wait() )
+ break;
+}
+
+
+/* Returns 1 if file is closed, 0 if descriptor is still live.
+ *
+ * i is index into cmdtab
+ *
+ * s (stream) indexes:
+ * - cmdtab[ i ].stream[ s ]
+ * - cmdtab[ i ].buffer[ s ]
+ * - cmdtab[ i ].fd [ s ]
+ */
+
+int read_descriptor( int i, int s )
+{
+ int ret;
+ int len;
+ char buffer[BUFSIZ];
+
+ while ( 0 < ( ret = fread( buffer, sizeof(char), BUFSIZ-1, cmdtab[ i ].stream[ s ] ) ) )
+ {
+ buffer[ret] = 0;
+ if ( !cmdtab[ i ].buffer[ s ] )
+ {
+ /* Never been allocated. */
+ cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 );
+ memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 );
+ }
+ else
+ {
+ /* Previously allocated. */
+ char * tmp = cmdtab[ i ].buffer[ s ];
+ len = strlen( tmp );
+ cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( len + ret + 1 );
+ memcpy( cmdtab[ i ].buffer[ s ], tmp, len );
+ memcpy( cmdtab[ i ].buffer[ s ] + len, buffer, ret + 1 );
+ BJAM_FREE( tmp );
+ }
+ }
+
+ return feof(cmdtab[ i ].stream[ s ]);
+}
+
+
+void close_streams( int i, int s )
+{
+ /* Close the stream and pipe descriptor. */
+ fclose(cmdtab[ i ].stream[ s ]);
+ cmdtab[ i ].stream[ s ] = 0;
+
+ close(cmdtab[ i ].fd[ s ]);
+ cmdtab[ i ].fd[ s ] = 0;
+}
+
+
+void populate_file_descriptors( int * fmax, fd_set * fds)
+{
+ int i, fd_max = 0;
+ struct tms buf;
+ clock_t current = times( &buf );
+ select_timeout = globs.timeout;
+
+ /* Compute max read file descriptor for use in select. */
+ FD_ZERO(fds);
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ if ( 0 < cmdtab[ i ].fd[ OUT ] )
+ {
+ fd_max = fd_max < cmdtab[ i ].fd[ OUT ] ? cmdtab[ i ].fd[ OUT ] : fd_max;
+ FD_SET(cmdtab[ i ].fd[ OUT ], fds);
+ }
+ if ( globs.pipe_action != 0 )
+ {
+ if (0 < cmdtab[ i ].fd[ ERR ])
+ {
+ fd_max = fd_max < cmdtab[ i ].fd[ ERR ] ? cmdtab[ i ].fd[ ERR ] : fd_max;
+ FD_SET(cmdtab[ i ].fd[ ERR ], fds);
+ }
+ }
+
+ if (globs.timeout && cmdtab[ i ].pid) {
+ clock_t consumed = (current - cmdtab[ i ].start_time) / tps;
+ clock_t process_timesout = globs.timeout - consumed;
+ if (0 < process_timesout && process_timesout < select_timeout) {
+ select_timeout = process_timesout;
+ }
+ if ( globs.timeout <= consumed )
+ {
+ killpg( cmdtab[ i ].pid, SIGKILL );
+ cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
+ }
+ }
+ }
+ *fmax = fd_max;
+}
+
+
+/*
+ * exec_wait() - wait and drive at most one execution completion.
+ */
+
+int exec_wait()
+{
+ int i;
+ int ret;
+ int fd_max;
+ int pid;
+ int status;
+ int finished;
+ int rstat;
+ timing_info time_info;
+ fd_set fds;
+ struct tms new_time;
+
+ /* Handle naive make1() which does not know if commands are running. */
+ if ( !cmdsrunning )
+ return 0;
+
+ /* Process children that signaled. */
+ finished = 0;
+ while ( !finished && cmdsrunning )
+ {
+ /* Compute max read file descriptor for use in select(). */
+ populate_file_descriptors( &fd_max, &fds );
+
+ if ( 0 < globs.timeout )
+ {
+ /* Force select() to timeout so we can terminate expired processes.
+ */
+ tv.tv_sec = select_timeout;
+ tv.tv_usec = 0;
+
+ /* select() will wait until: i/o on a descriptor, a signal, or we
+ * time out.
+ */
+ ret = select( fd_max + 1, &fds, 0, 0, &tv );
+ }
+ else
+ {
+ /* select() will wait until i/o on a descriptor or a signal. */
+ ret = select( fd_max + 1, &fds, 0, 0, 0 );
+ }
+
+ if ( 0 < ret )
+ {
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ int out = 0;
+ int err = 0;
+ if ( FD_ISSET( cmdtab[ i ].fd[ OUT ], &fds ) )
+ out = read_descriptor( i, OUT );
+
+ if ( ( globs.pipe_action != 0 ) &&
+ ( FD_ISSET( cmdtab[ i ].fd[ ERR ], &fds ) ) )
+ err = read_descriptor( i, ERR );
+
+ /* If feof on either descriptor, then we are done. */
+ if ( out || err )
+ {
+ /* Close the stream and pipe descriptors. */
+ close_streams( i, OUT );
+ if ( globs.pipe_action != 0 )
+ close_streams( i, ERR );
+
+ /* Reap the child and release resources. */
+ pid = waitpid( cmdtab[ i ].pid, &status, 0 );
+
+ if ( pid == cmdtab[ i ].pid )
+ {
+ finished = 1;
+ pid = 0;
+ cmdtab[ i ].pid = 0;
+
+ /* Set reason for exit if not timed out. */
+ if ( WIFEXITED( status ) )
+ {
+ cmdtab[ i ].exit_reason = 0 == WEXITSTATUS( status )
+ ? EXIT_OK
+ : EXIT_FAIL;
+ }
+
+ /* Print out the rule and target name. */
+ out_action( cmdtab[ i ].action, cmdtab[ i ].target,
+ cmdtab[ i ].command, cmdtab[ i ].buffer[ OUT ],
+ cmdtab[ i ].buffer[ ERR ], cmdtab[ i ].exit_reason
+ );
+
+ times( &new_time );
+
+ time_info.system = (double)( new_time.tms_cstime - old_time.tms_cstime ) / CLOCKS_PER_SEC;
+ time_info.user = (double)( new_time.tms_cutime - old_time.tms_cutime ) / CLOCKS_PER_SEC;
+ time_info.start = cmdtab[ i ].start_dt;
+ time_info.end = time( 0 );
+
+ old_time = new_time;
+
+ /* Drive the completion. */
+ --cmdsrunning;
+
+ if ( intr )
+ rstat = EXEC_CMD_INTR;
+ else if ( status != 0 )
+ rstat = EXEC_CMD_FAIL;
+ else
+ rstat = EXEC_CMD_OK;
+
+ /* Assume -p0 in effect so only pass buffer[ 0 ]
+ * containing merged output.
+ */
+ (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat,
+ &time_info, cmdtab[ i ].command,
+ cmdtab[ i ].buffer[ 0 ] );
+
+ BJAM_FREE( cmdtab[ i ].buffer[ OUT ] );
+ cmdtab[ i ].buffer[ OUT ] = 0;
+
+ BJAM_FREE( cmdtab[ i ].buffer[ ERR ] );
+ cmdtab[ i ].buffer[ ERR ] = 0;
+
+ BJAM_FREE( cmdtab[ i ].command );
+ cmdtab[ i ].command = 0;
+
+ cmdtab[ i ].func = 0;
+ cmdtab[ i ].closure = 0;
+ cmdtab[ i ].start_time = 0;
+ }
+ else
+ {
+ printf( "unknown pid %d with errno = %d\n", pid, errno );
+ exit( EXITBAD );
+ }
+ }
+ }
+ }
+ }
+
+ return 1;
+}
+
+# endif /* USE_EXECUNIX */
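
For reference, the asynchronous model implemented above is: exec_cmd() forks the child behind non-blocking pipes and records a completion callback plus closure in cmdtab, and exec_wait() select()s on those pipes, reaps the child with waitpid(), then invokes the callback with the exit status, timing data, the command line and its captured output. A minimal caller-side sketch of a callback with that shape (the done() function, the local timing_info mirror and the main() driver are illustrative assumptions, not jam code):

    #include <stdio.h>
    #include <time.h>

    /* Standalone mirror of the timing_info fields read in exec_wait() above. */
    typedef struct { double system; double user; time_t start; time_t end; } timing_info;

    /* Completion callback with the shape exec_cmd() stores in cmdtab and
     * exec_wait() invokes: closure is caller data, status is the rstat value
     * (EXEC_CMD_OK / EXEC_CMD_FAIL / EXEC_CMD_INTR), timing holds the child's
     * user/system times, cmd is the command line, and output is the merged
     * stdout/stderr buffer, which may be NULL if the child printed nothing. */
    static void done( void * closure, int status, timing_info * timing,
                      char * cmd, char * output )
    {
        int * failures = (int *)closure;
        if ( status != 0 )   /* assumes EXEC_CMD_OK == 0, as in stock execcmd.h */
            ++*failures;
        printf( "[%s] user %.2fs system %.2fs\n", cmd, timing->user, timing->system );
        if ( output )
            fputs( output, stdout );
    }

    int main( void )
    {
        int failures = 0;
        timing_info t = { 0.0, 0.0, 0, 0 };
        done( &failures, 0, &t, "echo hello", "hello\n" );
        return failures;
    }

In the code above, the pointer saved in cmdtab[ slot ].func is fired from exec_wait() once the child's output pipes report end of file and the child has been reaped.
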
diff --git a/jam-files/engine/execvms.c b/jam-files/engine/execvms.c
new file mode 100644
index 000000000..729917d35
--- /dev/null
+++ b/jam-files/engine/execvms.c
@@ -0,0 +1,161 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "execcmd.h"
+
+#ifdef OS_VMS
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <iodef.h>
+#include <ssdef.h>
+#include <descrip.h>
+#include <dvidef.h>
+#include <clidef.h>
+
+/*
+ * execvms.c - execute a shell script, ala VMS.
+ *
+ * The approach is this:
+ *
+ * If the command is a single line, and shorter than WRTLEN (what we believe to
+ * be the maximum line length), we just system() it.
+ *
+ * If the command is multi-line, or longer than WRTLEN, we write the command
+ * block to a temp file, splitting long lines (using "-" at the end of the line
+ * to indicate continuation), and then source that temp file. We use special
+ * logic to make sure we do not continue in the middle of a quoted string.
+ *
+ * 05/04/94 (seiwald) - async multiprocess interface; noop on VMS
+ * 12/20/96 (seiwald) - rewritten to handle multi-line commands well
+ * 01/14/96 (seiwald) - do not put -'s between "'s
+ */
+
+#define WRTLEN 240
+
+#define MIN( a, b ) ((a) < (b) ? (a) : (b))
+
+/* 1 for the @ and 4 for the .com */
+
+char tempnambuf[ L_tmpnam + 1 + 4 ] = { 0 };
+
+
+void exec_cmd
+(
+ char * string,
+ void (* func)( void * closure, int status, timing_info *, char *, char * ),
+ void * closure,
+ LIST * shell,
+ char * rule_name,
+ char * target
+)
+{
+ char * s;
+ char * e;
+    char * p;
+ int rstat = EXEC_CMD_OK;
+ int status;
+
+ /* See if string is more than one line discounting leading/trailing white
+ * space.
+ */
+ for ( s = string; *s && isspace( *s ); ++s );
+
+ e = p = strchr( s, '\n' );
+
+ while ( p && isspace( *p ) )
+ ++p;
+
+ /* If multi line or long, write to com file. Otherwise, exec directly. */
+ if ( ( p && *p ) || ( e - s > WRTLEN ) )
+ {
+ FILE * f;
+
+ /* Create temp file invocation "@sys$scratch:tempfile.com". */
+ if ( !*tempnambuf )
+ {
+ tempnambuf[0] = '@';
+ (void)tmpnam( tempnambuf + 1 );
+ strcat( tempnambuf, ".com" );
+ }
+
+ /* Open tempfile. */
+ if ( !( f = fopen( tempnambuf + 1, "w" ) ) )
+ {
+ printf( "can't open command file\n" );
+ (*func)( closure, EXEC_CMD_FAIL );
+ return;
+ }
+
+ /* For each line of the string. */
+ while ( *string )
+ {
+ char * s = strchr( string, '\n' );
+ int len = s ? s + 1 - string : strlen( string );
+
+ fputc( '$', f );
+
+ /* For each chunk of a line that needs to be split. */
+ while ( len > 0 )
+ {
+ char * q = string;
+ char * qe = string + MIN( len, WRTLEN );
+ char * qq = q;
+ int quote = 0;
+
+ /* Look for matching "s. */
+ for ( ; q < qe; ++q )
+ if ( ( *q == '"' ) && ( quote = !quote ) )
+ qq = q;
+
+ /* Back up to opening quote, if in one. */
+ if ( quote )
+ q = qq;
+
+ fwrite( string, ( q - string ), 1, f );
+
+ len -= ( q - string );
+ string = q;
+
+ if ( len )
+ {
+ fputc( '-', f );
+ fputc( '\n', f );
+ }
+ }
+ }
+
+ fclose( f );
+
+ status = system( tempnambuf ) & 0x07;
+
+ unlink( tempnambuf + 1 );
+ }
+ else
+ {
+ /* Execute single line command. Strip trailing newline before execing.
+ */
+ if ( e ) *e = 0;
+ status = system( s ) & 0x07;
+ }
+
+ /* Fail for error or fatal error. OK on OK, warning or info exit. */
+ if ( ( status == 2 ) || ( status == 4 ) )
+ rstat = EXEC_CMD_FAIL;
+
+ (*func)( closure, rstat );
+}
+
+
+int exec_wait()
+{
+ return 0;
+}
+
+# endif /* VMS */
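
The header comment above describes the key trick in the multi-line path: long lines are written to the temporary .com file in chunks of at most WRTLEN characters, with a "-" continuation between chunks, and a chunk is never allowed to end inside a double-quoted string. A standalone sketch of just that boundary computation (chunk_length() and the main() driver are illustrative, not jam code):

    #include <stdio.h>
    #include <string.h>

    #define WRTLEN 240
    #define MIN( a, b ) ( (a) < (b) ? (a) : (b) )

    /* How many characters of 'line' (length 'len') may go into one chunk:
     * at most WRTLEN, but never ending inside a double-quoted string. If
     * the cut point falls inside quotes, back up to the opening quote, as
     * the inner loop of exec_cmd() above does. */
    static size_t chunk_length( char const * line, size_t len )
    {
        char const * q = line;
        char const * qe = line + MIN( len, (size_t)WRTLEN );
        char const * qq = q;   /* most recent opening quote */
        int quote = 0;

        for ( ; q < qe; ++q )
            if ( ( *q == '"' ) && ( quote = !quote ) )
                qq = q;

        if ( quote )           /* cut would land inside a quoted string */
            q = qq;

        return (size_t)( q - line );
    }

    int main( void )
    {
        char line[ 600 ];
        char const * p = line;
        size_t len;

        memset( line, 'x', sizeof( line ) - 1 );
        line[ sizeof( line ) - 1 ] = '\0';
        len = strlen( line );

        /* Emit the line in chunks, with "-" as the continuation marker
         * between chunks, the way the .com writer above does. */
        while ( len > 0 )
        {
            size_t n = chunk_length( p, len );
            printf( "%.*s%s\n", (int)n, p, n < len ? "-" : "" );
            p += n;
            len -= n;
        }
        return 0;
    }

With an unquoted 599-character line this prints chunks of 240, 240 and 119 characters, the first two ending in the continuation marker.
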
diff --git a/jam-files/engine/expand.c b/jam-files/engine/expand.c
new file mode 100644
index 000000000..d8e58827c
--- /dev/null
+++ b/jam-files/engine/expand.c
@@ -0,0 +1,733 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "lists.h"
+# include "variable.h"
+# include "expand.h"
+# include "pathsys.h"
+# include "newstr.h"
+# include <assert.h>
+# include <stdlib.h>
+# include <limits.h>
+
+# ifdef OS_CYGWIN
+# include <sys/cygwin.h>
+# include <windows.h>
+# endif
+
+/*
+ * expand.c - expand a buffer, given variable values
+ *
+ * External routines:
+ *
+ * var_expand() - variable-expand input string into list of strings
+ *
+ * Internal routines:
+ *
+ * var_edit_parse() - parse : modifiers into PATHNAME structure.
+ * var_edit_file() - copy input target name to output, modifying filename.
+ * var_edit_shift() - do upshift/downshift mods.
+ *
+ * 01/25/94 (seiwald) - $(X)$(UNDEF) was expanding like plain $(X)
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 01/11/01 (seiwald) - added support for :E=emptyvalue, :J=joinval
+ */
+
+typedef struct
+{
+ PATHNAME f; /* :GDBSMR -- pieces */
+ char parent; /* :P -- go to parent directory */
+ char filemods; /* one of the above applied */
+ char downshift; /* :L -- downshift result */
+ char upshift; /* :U -- upshift result */
+ char to_slashes; /* :T -- convert "\" to "/" */
+ char to_windows; /* :W -- convert cygwin to native paths */
+ PATHPART empty; /* :E -- default for empties */
+ PATHPART join; /* :J -- join list with char */
+} VAR_EDITS ;
+
+static void var_edit_parse( char * mods, VAR_EDITS * edits );
+static void var_edit_file ( char * in, string * out, VAR_EDITS * edits );
+static void var_edit_shift( string * out, VAR_EDITS * edits );
+
+#define MAGIC_COLON '\001'
+#define MAGIC_LEFT '\002'
+#define MAGIC_RIGHT '\003'
+
+
+/*
+ * var_expand() - variable-expand input string into list of strings.
+ *
+ * Would just copy input to output, performing variable expansion, except that
+ * since variables can contain multiple values the result of variable expansion
+ * may contain multiple values (a list). Properly performs "product" operations
+ * that occur in "$(var1)xxx$(var2)" or even "$($(var2))".
+ *
+ * Returns a newly created list.
+ */
+
+LIST * var_expand( LIST * l, char * in, char * end, LOL * lol, int cancopyin )
+{
+ char out_buf[ MAXSYM ];
+ string buf[ 1 ];
+ string out1[ 1 ]; /* temporary buffer */
+ size_t prefix_length;
+ char * out;
+ char * inp = in;
+ char * ov; /* for temp copy of variable in outbuf */
+ int depth;
+
+ if ( DEBUG_VAREXP )
+ printf( "expand '%.*s'\n", end - in, in );
+
+ /* This gets a lot of cases: $(<) and $(>). */
+ if
+ (
+ ( in[ 0 ] == '$' ) &&
+ ( in[ 1 ] == '(' ) &&
+ ( in[ 3 ] == ')' ) &&
+ ( in[ 4 ] == '\0' )
+ )
+ {
+ switch ( in[ 2 ] )
+ {
+ case '<': return list_copy( l, lol_get( lol, 0 ) );
+ case '>': return list_copy( l, lol_get( lol, 1 ) );
+
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ return list_copy( l, lol_get( lol, in[ 2 ] - '1' ) );
+ }
+ }
+ else if ( in[0] == '$' && in[1] == '(' && in[2] == '1' && in[4] == ')' &&
+ in[5] == '\0') {
+
+ switch( in[3] )
+ {
+ case '0':
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ return list_copy( l, lol_get( lol, in[3]-'0'+10-1 ) );
+ }
+ }
+
+ /* Expand @() files, to single item plus accompanying file. */
+ if ( ( in[ 0 ] == '@' ) && ( in[ 1 ] == '(' ) && ( *( end - 1 ) == ')' ) )
+ {
+        /* We try the expansion until it fits within the prospective output
+ * buffer.
+ */
+ char * at_buf = 0;
+ int at_size = MAXJPATH;
+ int at_len = 0;
+ do
+ {
+ BJAM_FREE( at_buf );
+ at_buf = (char *)BJAM_MALLOC_ATOMIC( at_size + 1 );
+ at_len = var_string( in, at_buf, at_size, lol );
+ at_size *= 2;
+ }
+ while ( ( at_len < 0 ) && ( at_size < INT_MAX / 2 ) );
+ /* Return the result as a single item list. */
+ if ( at_len > 0 )
+ {
+ LIST * r;
+ string_copy( buf, at_buf );
+ r = list_new( l, newstr( buf->value ) );
+ string_free( buf );
+ BJAM_FREE( at_buf );
+ return r;
+ }
+ BJAM_FREE( at_buf );
+ }
+
+ /* Just try simple copy of in to out. */
+ while ( in < end )
+ if ( ( *in++ == '$' ) && ( *in == '(' ) )
+ goto expand;
+
+ /* No variables expanded - just add copy of input string to list. */
+
+ /* 'cancopyin' is an optimization: if the input was already a list item, we
+ * can use copystr() to put it on the new list. Otherwise, we use the slower
+ * newstr().
+ */
+ if ( cancopyin )
+ return list_new( l, copystr( inp ) );
+
+ {
+ LIST * r;
+ string_new( buf );
+ string_append_range( buf, inp, end );
+ r = list_new( l, newstr( buf->value ) );
+ string_free( buf );
+ return r;
+ }
+
+expand:
+ string_new( buf );
+ string_append_range( buf, inp, in - 1 ); /* Copy the part before '$'. */
+ /*
+ * Input so far (ignore blanks):
+ *
+ * stuff-in-outbuf $(variable) remainder
+ * ^ ^
+ * in end
+ * Output so far:
+ *
+ * stuff-in-outbuf $
+ * ^ ^
+ * out_buf out
+ *
+ *
+ * We just copied the $ of $(...), so back up one on the output. We now find
+ * the matching close paren, copying the variable and modifiers between the
+ * $( and ) temporarily into out_buf, so that we can replace :'s with
+ * MAGIC_COLON. This is necessary to avoid being confused by modifier values
+ * that are variables containing :'s. Ugly.
+ */
+
+ depth = 1;
+ inp = ++in; /* Skip over the '('. */
+
+ while ( ( in < end ) && depth )
+ {
+ switch ( *in++ )
+ {
+ case '(': ++depth; break;
+ case ')': --depth; break;
+ }
+ }
+
+ /*
+ * Input so far (ignore blanks):
+ *
+ * stuff-in-outbuf $(variable) remainder
+ * ^ ^ ^
+ * inp in end
+ */
+ prefix_length = buf->size;
+ string_append_range( buf, inp, in - 1 );
+
+ out = buf->value + prefix_length;
+ for ( ov = out; ov < buf->value + buf->size; ++ov )
+ {
+ switch ( *ov )
+ {
+ case ':': *ov = MAGIC_COLON; break;
+ case '[': *ov = MAGIC_LEFT ; break;
+ case ']': *ov = MAGIC_RIGHT; break;
+ }
+ }
+
+ /*
+ * Input so far (ignore blanks):
+ *
+ * stuff-in-outbuf $(variable) remainder
+ * ^ ^
+ * in end
+ * Output so far:
+ *
+ * stuff-in-outbuf variable
+ * ^ ^ ^
+ * out_buf out ov
+ *
+ * Later we will overwrite 'variable' in out_buf, but we will be done with
+ * it by then. 'variable' may be a multi-element list, so may each value for
+ * '$(variable element)', and so may 'remainder'. Thus we produce a product
+ * of three lists.
+ */
+ {
+ LIST * variables = 0;
+ LIST * remainder = 0;
+ LIST * vars;
+
+ /* Recursively expand variable name & rest of input. */
+ if ( out < ov ) variables = var_expand( L0, out, ov, lol, 0 );
+ if ( in < end ) remainder = var_expand( L0, in, end, lol, 0 );
+
+ /* Now produce the result chain. */
+
+ /* For each variable name. */
+ for ( vars = variables; vars; vars = list_next( vars ) )
+ {
+ LIST * value = 0;
+ LIST * evalue = 0;
+ char * colon;
+ char * bracket;
+ string variable[1];
+ char * varname;
+ int sub1 = 0;
+ int sub2 = -1;
+ VAR_EDITS edits;
+
+ /* Look for a : modifier in the variable name. Must copy into
+ * varname so we can modify it.
+ */
+ string_copy( variable, vars->string );
+ varname = variable->value;
+
+ if ( ( colon = strchr( varname, MAGIC_COLON ) ) )
+ {
+ string_truncate( variable, colon - varname );
+ var_edit_parse( colon + 1, &edits );
+ }
+
+ /* Look for [x-y] subscripting. sub1 and sub2 are x and y. */
+ if ( ( bracket = strchr( varname, MAGIC_LEFT ) ) )
+ {
+ /* Make all syntax errors in [] subscripting result in the same
+                 * behavior: silently return an empty expansion (by setting sub2
+                 * = 0). Brute force parsing; may get moved into yacc someday.
+ */
+
+ char * s = bracket + 1;
+
+ string_truncate( variable, bracket - varname );
+
+ do /* so we can use "break" */
+ {
+ /* Allow negative indexes. */
+ if ( !isdigit( *s ) && ( *s != '-' ) )
+ {
+ sub2 = 0;
+ break;
+ }
+ sub1 = atoi( s );
+
+ /* Skip over the first symbol, which is either a digit or dash. */
+ ++s;
+ while ( isdigit( *s ) ) ++s;
+
+ if ( *s == MAGIC_RIGHT )
+ {
+ sub2 = sub1;
+ break;
+ }
+
+ if ( *s != '-' )
+ {
+ sub2 = 0;
+ break;
+ }
+
+ ++s;
+
+ if ( *s == MAGIC_RIGHT )
+ {
+ sub2 = -1;
+ break;
+ }
+
+ if ( !isdigit( *s ) && ( *s != '-' ) )
+ {
+ sub2 = 0;
+ break;
+ }
+
+ /* First, compute the index of the last element. */
+ sub2 = atoi( s );
+ while ( isdigit( *++s ) );
+
+ if ( *s != MAGIC_RIGHT )
+ sub2 = 0;
+
+ } while ( 0 );
+
+ /* Anything but the end of the string, or the colon introducing
+ * a modifier is a syntax error.
+ */
+ ++s;
+ if ( *s && ( *s != MAGIC_COLON ) )
+ sub2 = 0;
+
+ *bracket = '\0';
+ }
+
+ /* Get variable value, with special handling for $(<), $(>), $(n).
+ */
+ if ( !varname[1] )
+ {
+ if ( varname[0] == '<' )
+ value = lol_get( lol, 0 );
+ else if ( varname[0] == '>' )
+ value = lol_get( lol, 1 );
+ else if ( ( varname[0] >= '1' ) && ( varname[0] <= '9' ) )
+ value = lol_get( lol, varname[0] - '1' );
+ else if( varname[0] == '1' && varname[1] >= '0' &&
+ varname[1] <= '9' && !varname[2] )
+ value = lol_get( lol, varname[1] - '0' + 10 - 1 );
+ }
+
+ if ( !value )
+ value = var_get( varname );
+
+            /* Handle negative indexes: part two. */
+ {
+ int length = list_length( value );
+
+ if ( sub1 < 0 )
+ sub1 = length + sub1;
+ else
+ sub1 -= 1;
+
+ if ( sub2 < 0 )
+ sub2 = length + 1 + sub2 - sub1;
+ else
+ sub2 -= sub1;
+ /* The "sub2 < 0" test handles the semantic error of sub2 <
+ * sub1.
+ */
+ if ( sub2 < 0 )
+ sub2 = 0;
+ }
+
+ /* The fast path: $(x) - just copy the variable value. This is only
+ * an optimization.
+ */
+ if ( ( out == out_buf ) && !bracket && !colon && ( in == end ) )
+ {
+ string_free( variable );
+ l = list_copy( l, value );
+ continue;
+ }
+
+ /* Handle start subscript. */
+ while ( ( sub1 > 0 ) && value )
+ --sub1, value = list_next( value );
+
+ /* Empty w/ :E=default?. */
+ if ( !value && colon && edits.empty.ptr )
+ evalue = value = list_new( L0, newstr( edits.empty.ptr ) );
+
+ /* For each variable value. */
+ string_new( out1 );
+ for ( ; value; value = list_next( value ) )
+ {
+ LIST * rem;
+ size_t postfix_start;
+
+ /* Handle end subscript (length actually). */
+
+ if ( sub2 >= 0 && --sub2 < 0 )
+ break;
+
+ string_truncate( buf, prefix_length );
+
+ /* Apply : mods, if present */
+
+ if ( colon && edits.filemods )
+ var_edit_file( value->string, out1, &edits );
+ else
+ string_append( out1, value->string );
+
+ if ( colon && ( edits.upshift || edits.downshift || edits.to_slashes || edits.to_windows ) )
+ var_edit_shift( out1, &edits );
+
+ /* Handle :J=joinval */
+ /* If we have more values for this var, just keep appending them
+ * (using the join value) rather than creating separate LIST
+ * elements.
+ */
+ if ( colon && edits.join.ptr &&
+ ( list_next( value ) || list_next( vars ) ) )
+ {
+ string_append( out1, edits.join.ptr );
+ continue;
+ }
+
+ string_append( buf, out1->value );
+ string_free( out1 );
+ string_new( out1 );
+
+ /* If no remainder, append result to output chain. */
+ if ( in == end )
+ {
+ l = list_new( l, newstr( buf->value ) );
+ continue;
+ }
+
+ /* For each remainder, append the complete string to the output
+ * chain. Remember the end of the variable expansion so we can
+ * just tack on each instance of 'remainder'.
+ */
+ postfix_start = buf->size;
+ for ( rem = remainder; rem; rem = list_next( rem ) )
+ {
+ string_truncate( buf, postfix_start );
+ string_append( buf, rem->string );
+ l = list_new( l, newstr( buf->value ) );
+ }
+ }
+ string_free( out1 );
+
+ /* Toss used empty. */
+ if ( evalue )
+ list_free( evalue );
+
+ string_free( variable );
+ }
+
+ /* variables & remainder were gifts from var_expand and must be freed. */
+ if ( variables ) list_free( variables );
+ if ( remainder ) list_free( remainder );
+
+ if ( DEBUG_VAREXP )
+ {
+ printf( "expanded to " );
+ list_print( l );
+ printf( "\n" );
+ }
+
+ string_free( buf );
+ return l;
+ }
+}
+
+
+/*
+ * var_edit_parse() - parse : modifiers into PATHNAME structure
+ *
+ * The : modifiers in a $(varname:modifier) currently support replacing or
+ * omitting elements of a filename, and so they are parsed into a PATHNAME
+ * structure (which contains pointers into the original string).
+ *
+ * Modifiers of the form "X=value" replace the component X with the given value.
+ * Modifiers without the "=value" cause everything but the component X to be
+ * omitted. X is one of:
+ *
+ * G <grist>
+ * D directory name
+ * B base name
+ * S .suffix
+ * M (member)
+ * R root directory - prepended to whole path
+ *
+ * This routine sets:
+ *
+ * f->f_xxx.ptr = 0
+ * f->f_xxx.len = 0
+ * -> leave the original component xxx
+ *
+ * f->f_xxx.ptr = string
+ * f->f_xxx.len = strlen( string )
+ * -> replace component xxx with string
+ *
+ * f->f_xxx.ptr = ""
+ * f->f_xxx.len = 0
+ * -> omit component xxx
+ *
+ * var_edit_file() below and path_build() obligingly follow this convention.
+ */
+
+static void var_edit_parse( char * mods, VAR_EDITS * edits )
+{
+ int havezeroed = 0;
+ memset( (char *)edits, 0, sizeof( *edits ) );
+
+ while ( *mods )
+ {
+ char * p;
+ PATHPART * fp;
+
+ switch ( *mods++ )
+ {
+ case 'L': edits->downshift = 1; continue;
+ case 'U': edits->upshift = 1; continue;
+ case 'P': edits->parent = edits->filemods = 1; continue;
+ case 'E': fp = &edits->empty; goto strval;
+ case 'J': fp = &edits->join; goto strval;
+ case 'G': fp = &edits->f.f_grist; goto fileval;
+ case 'R': fp = &edits->f.f_root; goto fileval;
+ case 'D': fp = &edits->f.f_dir; goto fileval;
+ case 'B': fp = &edits->f.f_base; goto fileval;
+ case 'S': fp = &edits->f.f_suffix; goto fileval;
+ case 'M': fp = &edits->f.f_member; goto fileval;
+ case 'T': edits->to_slashes = 1; continue;
+ case 'W': edits->to_windows = 1; continue;
+ default:
+ return; /* Should complain, but so what... */
+ }
+
+ fileval:
+ /* Handle :CHARS, where each char (without a following =) selects a
+ * particular file path element. On the first such char, we deselect all
+ * others (by setting ptr = "", len = 0) and for each char we select
+ * that element (by setting ptr = 0).
+ */
+ edits->filemods = 1;
+
+ if ( *mods != '=' )
+ {
+ if ( !havezeroed++ )
+ {
+ int i;
+ for ( i = 0; i < 6; ++i )
+ {
+ edits->f.part[ i ].len = 0;
+ edits->f.part[ i ].ptr = "";
+ }
+ }
+
+ fp->ptr = 0;
+ continue;
+ }
+
+ strval:
+ /* Handle :X=value, or :X */
+ if ( *mods != '=' )
+ {
+ fp->ptr = "";
+ fp->len = 0;
+ }
+ else if ( ( p = strchr( mods, MAGIC_COLON ) ) )
+ {
+ *p = 0;
+ fp->ptr = ++mods;
+ fp->len = p - mods;
+ mods = p + 1;
+ }
+ else
+ {
+ fp->ptr = ++mods;
+ fp->len = strlen( mods );
+ mods += fp->len;
+ }
+ }
+}
+
+
+/*
+ * var_edit_file() - copy input target name to output, modifying filename.
+ */
+
+static void var_edit_file( char * in, string * out, VAR_EDITS * edits )
+{
+ PATHNAME pathname;
+
+ /* Parse apart original filename, putting parts into "pathname". */
+ path_parse( in, &pathname );
+
+ /* Replace any pathname with edits->f */
+ if ( edits->f.f_grist .ptr ) pathname.f_grist = edits->f.f_grist;
+ if ( edits->f.f_root .ptr ) pathname.f_root = edits->f.f_root;
+ if ( edits->f.f_dir .ptr ) pathname.f_dir = edits->f.f_dir;
+ if ( edits->f.f_base .ptr ) pathname.f_base = edits->f.f_base;
+ if ( edits->f.f_suffix.ptr ) pathname.f_suffix = edits->f.f_suffix;
+ if ( edits->f.f_member.ptr ) pathname.f_member = edits->f.f_member;
+
+ /* If requested, modify pathname to point to parent. */
+ if ( edits->parent )
+ path_parent( &pathname );
+
+ /* Put filename back together. */
+ path_build( &pathname, out, 0 );
+}
+
+
+/*
+ * var_edit_shift() - do upshift/downshift mods.
+ */
+
+static void var_edit_shift( string * out, VAR_EDITS * edits )
+{
+ /* Handle upshifting, downshifting and slash translation now. */
+ char * p;
+ for ( p = out->value; *p; ++p)
+ {
+ if ( edits->upshift )
+ *p = toupper( *p );
+ else if ( edits->downshift )
+ *p = tolower( *p );
+ if ( edits->to_slashes && ( *p == '\\' ) )
+ *p = '/';
+# ifdef OS_CYGWIN
+ if ( edits->to_windows )
+ {
+ char result[ MAX_PATH + 1 ];
+ cygwin_conv_to_win32_path( out->value, result );
+ assert( strlen( result ) <= MAX_PATH );
+ string_free( out );
+ string_copy( out, result );
+ }
+# endif
+ }
+ out->size = p - out->value;
+}
+
+
+#ifndef NDEBUG
+void var_expand_unit_test()
+{
+ LOL lol[ 1 ];
+ LIST * l;
+ LIST * l2;
+ LIST * expected = list_new( list_new( L0, newstr( "axb" ) ), newstr( "ayb" ) );
+ LIST * e2;
+ char axyb[] = "a$(xy)b";
+ char azb[] = "a$($(z))b";
+ char path[] = "$(p:W)";
+
+# ifdef OS_CYGWIN
+ char cygpath[ 256 ];
+ cygwin_conv_to_posix_path( "c:\\foo\\bar", cygpath );
+# else
+ char cygpath[] = "/cygdrive/c/foo/bar";
+# endif
+
+ lol_init(lol);
+ var_set( "xy", list_new( list_new( L0, newstr( "x" ) ), newstr( "y" ) ), VAR_SET );
+ var_set( "z", list_new( L0, newstr( "xy" ) ), VAR_SET );
+ var_set( "p", list_new( L0, newstr( cygpath ) ), VAR_SET );
+
+ l = var_expand( 0, axyb, axyb + sizeof( axyb ) - 1, lol, 0 );
+ for ( l2 = l, e2 = expected; l2 && e2; l2 = list_next( l2 ), e2 = list_next( e2 ) )
+ assert( !strcmp( e2->string, l2->string ) );
+ assert( l2 == 0 );
+ assert( e2 == 0 );
+ list_free( l );
+
+ l = var_expand( 0, azb, azb + sizeof( azb ) - 1, lol, 0 );
+ for ( l2 = l, e2 = expected; l2 && e2; l2 = list_next( l2 ), e2 = list_next( e2 ) )
+ assert( !strcmp( e2->string, l2->string ) );
+ assert( l2 == 0 );
+ assert( e2 == 0 );
+ list_free( l );
+
+ l = var_expand( 0, path, path + sizeof( path ) - 1, lol, 0 );
+ assert( l != 0 );
+ assert( list_next( l ) == 0 );
+# ifdef OS_CYGWIN
+    /* On some cygwin installations the drive letter comes back in a different
+     * case. This has been reported to happen when cygwin is installed to C:\
+     * as opposed to C:\cygwin. Since the case of the drive letter does not
+     * matter, we allow for both.
+ */
+ assert( !strcmp( l->string, "c:\\foo\\bar" ) ||
+ !strcmp( l->string, "C:\\foo\\bar" ) );
+# else
+ assert( !strcmp( l->string, cygpath ) );
+# endif
+ list_free( l );
+ list_free( expected );
+ lol_free( lol );
+}
+#endif
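
The var_expand() comment above notes that expansion "properly performs product operations" such as $(var1)xxx$(var2). A small illustration of what that product means, using hypothetical variables X = a b and Y = 1 2 (plain C, not jam code); the order shown matches the remainder loop in var_expand(), which combines each value of the leading variable with every expansion of the remainder:

    #include <stdio.h>

    /* Values of two hypothetical jam variables, as if a Jamfile said
     *     X = a b ;
     *     Y = 1 2 ;
     * so that "$(X)-$(Y)" expands to the product a-1 a-2 b-1 b-2. */
    static char const * const xs[] = { "a", "b" };
    static char const * const ys[] = { "1", "2" };

    int main( void )
    {
        size_t i, j;
        for ( i = 0; i < sizeof( xs ) / sizeof( xs[ 0 ] ); ++i )
            for ( j = 0; j < sizeof( ys ) / sizeof( ys[ 0 ] ); ++j )
                printf( "%s-%s ", xs[ i ], ys[ j ] );
        printf( "\n" );   /* prints: a-1 a-2 b-1 b-2 */
        return 0;
    }
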
diff --git a/jam-files/engine/expand.h b/jam-files/engine/expand.h
new file mode 100644
index 000000000..cc25d1909
--- /dev/null
+++ b/jam-files/engine/expand.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * expand.h - expand a buffer, given variable values
+ */
+
+#include "lists.h"
+
+LIST *var_expand( LIST *l, char *in, char *end, LOL *lol, int cancopyin );
+void var_expand_unit_test();
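
var_edit_parse() in expand.c above documents the :modifiers it declares here: G, R, D, B, S and M select or replace pieces of a file name, L/U downshift or upshift the result, E supplies a default for empty expansions and J joins list values. As a concrete case, :S=value replaces the suffix, so $(f:S=.o) turns src/Phrase.cpp into src/Phrase.o. A flat sketch of that one edit (replace_suffix() is a hypothetical stand-in; the real engine splits directory and basename with path_parse() and rebuilds the name with path_build()):

    #include <stdio.h>
    #include <string.h>

    /* Stand-in for the :S=value edit documented in var_edit_parse() above:
     * replace the suffix of 'name' (everything from the last '.') with
     * 'suffix'. This flat version is only an illustration. */
    static void replace_suffix( char const * name, char const * suffix,
                                char * out, size_t out_size )
    {
        char const * dot = strrchr( name, '.' );
        size_t base_len = dot ? (size_t)( dot - name ) : strlen( name );

        if ( out_size == 0 )
            return;
        if ( base_len + strlen( suffix ) + 1 > out_size )
        {
            out[ 0 ] = '\0';   /* result would not fit: return an empty string */
            return;
        }
        memcpy( out, name, base_len );
        strcpy( out + base_len, suffix );
    }

    int main( void )
    {
        char out[ 64 ];
        replace_suffix( "src/Phrase.cpp", ".o", out, sizeof( out ) );
        printf( "%s\n", out );   /* prints "src/Phrase.o", like $(f:S=.o) */
        return 0;
    }
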
diff --git a/jam-files/engine/filemac.c b/jam-files/engine/filemac.c
new file mode 100644
index 000000000..e69aa648f
--- /dev/null
+++ b/jam-files/engine/filemac.c
@@ -0,0 +1,175 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "filesys.h"
+# include "pathsys.h"
+
+# ifdef OS_MAC
+
+#include <Files.h>
+#include <Folders.h>
+
+# include <:sys:stat.h>
+
+/*
+ * filemac.c - manipulate file names and scan directories on macintosh
+ *
+ * External routines:
+ *
+ * file_dirscan() - scan a directory for files
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ * file_archscan() - scan an archive for files
+ *
+ * File_dirscan() and file_archscan() call back a caller provided function
+ * for each file found. A flag to this callback function lets file_dirscan()
+ * and file_archscan() indicate that a timestamp is being provided with the
+ * file. If file_dirscan() or file_archscan() do not provide the file's
+ * timestamp, interested parties may later call file_time().
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 12/19/94 (mikem) - solaris string table insanity support
+ * 02/14/95 (seiwald) - parse and build /xxx properly
+ * 05/03/96 (seiwald) - split into pathunix.c
+ * 11/21/96 (peterk) - BEOS does not have Unix-style archives
+ */
+
+
+void CopyC2PStr( char const * cstr, StringPtr pstr )
+{
+ int len;
+ for ( len = 0; *cstr && ( len < 255 ); pstr[ ++len ] = *cstr++ );
+ pstr[ 0 ] = len;
+}
+
+
+/*
+ * file_dirscan() - scan a directory for files.
+ */
+
+void file_dirscan( char * dir, scanback func, void * closure )
+{
+ PATHNAME f;
+ string filename[ 1 ];
+ unsigned char fullPath[ 512 ];
+
+ FSSpec spec;
+ WDPBRec vol;
+ Str63 volName;
+ CInfoPBRec lastInfo;
+ int index = 1;
+
+ /* First enter directory itself. */
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+
+ f.f_dir.ptr = dir;
+ f.f_dir.len = strlen(dir);
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan directory %s\n", dir );
+
+ /* Special case ":" - enter it */
+
+ if ( ( f.f_dir.len == 1 ) && ( f.f_dir.ptr[0] == ':' ) )
+ (*func)( closure, dir, 0 /* not stat()'ed */, (time_t)0 );
+
+ /* Now enter contents of directory */
+
+ vol.ioNamePtr = volName;
+
+ if ( PBHGetVolSync( &vol ) )
+ return;
+
+ CopyC2PStr( dir, fullPath );
+
+ if ( FSMakeFSSpec( vol.ioWDVRefNum, vol.ioWDDirID, fullPath, &spec ) )
+ return;
+
+ lastInfo.dirInfo.ioVRefNum = spec.vRefNum;
+ lastInfo.dirInfo.ioDrDirID = spec.parID;
+ lastInfo.dirInfo.ioNamePtr = spec.name;
+ lastInfo.dirInfo.ioFDirIndex = 0;
+ lastInfo.dirInfo.ioACUser = 0;
+
+ if ( PBGetCatInfoSync( &lastInfo ) )
+ return;
+
+ if ( !( lastInfo.dirInfo.ioFlAttrib & 0x10 ) )
+ return;
+
+ /* ioDrDirID must be reset each time. */
+ spec.parID = lastInfo.dirInfo.ioDrDirID;
+
+ string_new( filename );
+ for ( ; ; )
+ {
+ lastInfo.dirInfo.ioVRefNum = spec.vRefNum;
+ lastInfo.dirInfo.ioDrDirID = spec.parID;
+ lastInfo.dirInfo.ioNamePtr = fullPath;
+ lastInfo.dirInfo.ioFDirIndex = index++;
+
+ if ( PBGetCatInfoSync( &lastInfo ) )
+ return;
+
+ f.f_base.ptr = (char *)fullPath + 1;
+ f.f_base.len = *fullPath;
+
+ string_truncate( filename, 0 );
+ path_build( &f, filename, 0 );
+ (*func)( closure, filename->value, 0 /* not stat()'ed */, (time_t)0 );
+ }
+ string_free( filename );
+}
+
+
+/*
+ * file_time() - get timestamp of file, if not done by file_dirscan().
+ */
+
+int file_time( char * filename, time_t * time )
+{
+ struct stat statbuf;
+
+ if ( stat( filename, &statbuf ) < 0 )
+ return -1;
+
+ *time = statbuf.st_mtime;
+
+ return 0;
+}
+
+
+int file_is_file( char * filename )
+{
+ struct stat statbuf;
+ if ( stat( filename, &statbuf ) < 0 )
+ return -1;
+ return S_ISREG( statbuf.st_mode ) ? 1 : 0;
+}
+
+int file_mkdir(char *pathname)
+{
+ return mkdir(pathname, 0766);
+}
+
+
+/*
+ * file_archscan() - scan an archive for files.
+ */
+
+void file_archscan( char * archive, scanback func, void * closure )
+{
+}
+
+
+# endif /* macintosh */
diff --git a/jam-files/engine/filent.c b/jam-files/engine/filent.c
new file mode 100644
index 000000000..ab1895767
--- /dev/null
+++ b/jam-files/engine/filent.c
@@ -0,0 +1,387 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+
+# include "filesys.h"
+# include "pathsys.h"
+# include "strings.h"
+# include "newstr.h"
+
+# ifdef OS_NT
+
+# ifdef __BORLANDC__
+# if __BORLANDC__ < 0x550
+# include <dir.h>
+# include <dos.h>
+# endif
+# undef FILENAME /* cpp namespace collision */
+# define _finddata_t ffblk
+# endif
+
+# include <io.h>
+# include <sys/stat.h>
+# include <ctype.h>
+# include <direct.h>
+
+/*
+ * filent.c - scan directories and archives on NT
+ *
+ * External routines:
+ *
+ * file_dirscan() - scan a directory for files
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ * file_archscan() - scan an archive for files
+ *
+ * File_dirscan() and file_archscan() call back a caller provided function
+ * for each file found. A flag to this callback function lets file_dirscan()
+ * and file_archscan() indicate that a timestamp is being provided with the
+ * file. If file_dirscan() or file_archscan() do not provide the file's
+ * timestamp, interested parties may later call file_time().
+ *
+ * 07/10/95 (taylor) Findfirst() returns the first file on NT.
+ * 05/03/96 (seiwald) split apart into pathnt.c
+ */
+
+/*
+ * file_dirscan() - scan a directory for files
+ */
+
+void file_dirscan( char * dir, scanback func, void * closure )
+{
+ PROFILE_ENTER( FILE_DIRSCAN );
+
+ file_info_t * d = 0;
+
+ dir = short_path_to_long_path( dir );
+
+ /* First enter directory itself */
+
+ d = file_query( dir );
+
+ if ( !d || !d->is_dir )
+ {
+ PROFILE_EXIT( FILE_DIRSCAN );
+ return;
+ }
+
+ if ( !d->files )
+ {
+ PATHNAME f;
+ string filespec[ 1 ];
+ string filename[ 1 ];
+ long handle;
+ int ret;
+ struct _finddata_t finfo[ 1 ];
+ LIST * files = L0;
+ int d_length = strlen( d->name );
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+
+ f.f_dir.ptr = d->name;
+ f.f_dir.len = d_length;
+
+ /* Now enter contents of directory */
+
+ /* Prepare file search specification for the findfirst() API. */
+ if ( d_length == 0 )
+ string_copy( filespec, ".\\*" );
+ else
+ {
+ /*
+             * We cannot simply assume the given folder name will never include
+             * its trailing path separator, as otherwise we would not support the
+             * Windows root folder specified without its drive letter, i.e. '\'.
+ */
+ char trailingChar = d->name[ d_length - 1 ] ;
+ string_copy( filespec, d->name );
+ if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) )
+ string_append( filespec, "\\" );
+ string_append( filespec, "*" );
+ }
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan directory %s\n", dir );
+
+ #if defined(__BORLANDC__) && __BORLANDC__ < 0x550
+ if ( ret = findfirst( filespec->value, finfo, FA_NORMAL | FA_DIREC ) )
+ {
+ string_free( filespec );
+ PROFILE_EXIT( FILE_DIRSCAN );
+ return;
+ }
+
+ string_new ( filename );
+ while ( !ret )
+ {
+ file_info_t * ff = 0;
+
+ f.f_base.ptr = finfo->ff_name;
+ f.f_base.len = strlen( finfo->ff_name );
+
+ string_truncate( filename, 0 );
+            path_build( &f, filename, 0 );
+
+ files = list_new( files, newstr(filename->value) );
+ ff = file_info( filename->value );
+ ff->is_file = finfo->ff_attrib & FA_DIREC ? 0 : 1;
+ ff->is_dir = finfo->ff_attrib & FA_DIREC ? 1 : 0;
+ ff->size = finfo->ff_fsize;
+            ff->time = (finfo->ff_fdate << 16) | finfo->ff_ftime;
+
+ ret = findnext( finfo );
+ }
+ # else
+ handle = _findfirst( filespec->value, finfo );
+
+ if ( ret = ( handle < 0L ) )
+ {
+ string_free( filespec );
+ PROFILE_EXIT( FILE_DIRSCAN );
+ return;
+ }
+
+ string_new( filename );
+ while ( !ret )
+ {
+ file_info_t * ff = 0;
+
+ f.f_base.ptr = finfo->name;
+ f.f_base.len = strlen( finfo->name );
+
+ string_truncate( filename, 0 );
+ path_build( &f, filename, 0 );
+
+ files = list_new( files, newstr( filename->value ) );
+ ff = file_info( filename->value );
+ ff->is_file = finfo->attrib & _A_SUBDIR ? 0 : 1;
+ ff->is_dir = finfo->attrib & _A_SUBDIR ? 1 : 0;
+ ff->size = finfo->size;
+ ff->time = finfo->time_write;
+
+ ret = _findnext( handle, finfo );
+ }
+
+ _findclose( handle );
+ # endif
+ string_free( filename );
+ string_free( filespec );
+
+ d->files = files;
+ }
+
+ /* Special case \ or d:\ : enter it */
+ {
+ unsigned long len = strlen(d->name);
+ if ( len == 1 && d->name[0] == '\\' )
+ (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
+ else if ( len == 3 && d->name[1] == ':' ) {
+ (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
+            /* We've just entered the 3-letter drive name spelling (with a
+               trailing slash) into the hash table. Now enter the two-letter variant,
+ without trailing slash, so that if we try to check whether
+ "c:" exists, we hit it.
+
+ Jam core has workarounds for that. Given:
+ x = c:\whatever\foo ;
+ p = $(x:D) ;
+ p2 = $(p:D) ;
+ There will be no trailing slash in $(p), but there will be one
+ in $(p2). But, that seems rather fragile.
+ */
+ d->name[2] = 0;
+ (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
+ }
+ }
+
+ /* Now enter contents of directory */
+ if ( d->files )
+ {
+ LIST * files = d->files;
+ while ( files )
+ {
+ file_info_t * ff = file_info( files->string );
+ (*func)( closure, ff->name, 1 /* stat()'ed */, ff->time );
+ files = list_next( files );
+ }
+ }
+
+ PROFILE_EXIT( FILE_DIRSCAN );
+}
+
+file_info_t * file_query( char * filename )
+{
+ file_info_t * ff = file_info( filename );
+ if ( ! ff->time )
+ {
+ struct stat statbuf;
+
+ if ( stat( *filename ? filename : ".", &statbuf ) < 0 )
+ return 0;
+
+ ff->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
+ ff->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
+ ff->size = statbuf.st_size;
+ ff->time = statbuf.st_mtime ? statbuf.st_mtime : 1;
+ }
+ return ff;
+}
+
+/*
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ */
+
+int
+file_time(
+ char *filename,
+ time_t *time )
+{
+ file_info_t * ff = file_query( filename );
+ if ( !ff ) return -1;
+ *time = ff->time;
+ return 0;
+}
+
+int file_is_file(char* filename)
+{
+ file_info_t * ff = file_query( filename );
+ if ( !ff ) return -1;
+ return ff->is_file;
+}
+
+int file_mkdir(char *pathname)
+{
+ return _mkdir(pathname);
+}
+
+/*
+ * file_archscan() - scan an archive for files
+ */
+
+/* Straight from SunOS */
+
+#define ARMAG "!<arch>\n"
+#define SARMAG 8
+
+#define ARFMAG "`\n"
+
+struct ar_hdr {
+ char ar_name[16];
+ char ar_date[12];
+ char ar_uid[6];
+ char ar_gid[6];
+ char ar_mode[8];
+ char ar_size[10];
+ char ar_fmag[2];
+};
+
+# define SARFMAG 2
+# define SARHDR sizeof( struct ar_hdr )
+
+void
+file_archscan(
+ char *archive,
+ scanback func,
+ void *closure )
+{
+ struct ar_hdr ar_hdr;
+ char *string_table = 0;
+ char buf[ MAXJPATH ];
+ long offset;
+ int fd;
+
+ if ( ( fd = open( archive, O_RDONLY | O_BINARY, 0 ) ) < 0 )
+ return;
+
+ if ( read( fd, buf, SARMAG ) != SARMAG ||
+ strncmp( ARMAG, buf, SARMAG ) )
+ {
+ close( fd );
+ return;
+ }
+
+ offset = SARMAG;
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
+ !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) )
+ {
+ long lar_date;
+ long lar_size;
+ char *name = 0;
+ char *endname;
+ char *c;
+
+ sscanf( ar_hdr.ar_date, "%ld", &lar_date );
+ sscanf( ar_hdr.ar_size, "%ld", &lar_size );
+
+ lar_size = ( lar_size + 1 ) & ~1;
+
+ if (ar_hdr.ar_name[0] == '/' && ar_hdr.ar_name[1] == '/' )
+ {
+ /* this is the "string table" entry of the symbol table,
+ ** which holds strings of filenames that are longer than
+            ** 15 characters (i.e. they do not fit into ar_name).
+ */
+
+ string_table = BJAM_MALLOC_ATOMIC(lar_size+1);
+ if (read(fd, string_table, lar_size) != lar_size)
+ printf("error reading string table\n");
+ string_table[lar_size] = '\0';
+ offset += SARHDR + lar_size;
+ continue;
+ }
+ else if (ar_hdr.ar_name[0] == '/' && ar_hdr.ar_name[1] != ' ')
+ {
+ /* Long filenames are recognized by "/nnnn" where nnnn is
+ ** the offset of the string in the string table represented
+ ** in ASCII decimals.
+ */
+
+ name = string_table + atoi( ar_hdr.ar_name + 1 );
+ for ( endname = name; *endname && *endname != '\n'; ++endname) {}
+ }
+ else
+ {
+ /* normal name */
+ name = ar_hdr.ar_name;
+ endname = name + sizeof( ar_hdr.ar_name );
+ }
+
+ /* strip trailing white-space, slashes, and backslashes */
+
+ while ( endname-- > name )
+ if ( !isspace(*endname) && ( *endname != '\\' ) && ( *endname != '/' ) )
+ break;
+ *++endname = 0;
+
+ /* strip leading directory names, an NT specialty */
+
+ if ( c = strrchr( name, '/' ) )
+ name = c + 1;
+ if ( c = strrchr( name, '\\' ) )
+ name = c + 1;
+
+ sprintf( buf, "%s(%.*s)", archive, endname - name, name );
+ (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
+
+ offset += SARHDR + lar_size;
+ lseek( fd, offset, 0 );
+ }
+
+ close( fd );
+}
+
+# endif /* NT */
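
The archive scanner above handles the SysV/GNU ar convention spelled out in its comments: a member named "//" is the string table holding names longer than 15 characters, and a member name of the form "/nnnn" is a decimal offset into that table. A small illustration of resolving such names (print_member_name() and the sample table are assumptions for the sketch, not jam code; filent.c stops long names at '\n' while fileunix.c stops at '/', and the sketch accepts either terminator):

    #include <stdio.h>
    #include <stdlib.h>

    /* Resolve an ar member name. 'ar_name' is the 16-byte name field from the
     * member header; "/nnnn" names are decimal offsets into the "//" string
     * table; short names are stored inline, padded with '/' or blanks. */
    static void print_member_name( char const * ar_name, char const * string_table )
    {
        char const * name;
        char const * end;

        if ( ar_name[ 0 ] == '/' && ar_name[ 1 ] >= '0' && ar_name[ 1 ] <= '9' )
            name = string_table + atoi( ar_name + 1 );   /* long name */
        else
            name = ar_name;                              /* inline short name */

        for ( end = name; *end && *end != '/' && *end != '\n' && *end != ' '; ++end )
            ;
        printf( "%.*s\n", (int)( end - name ), name );
    }

    int main( void )
    {
        char const table[] = "averylongobjectfilename.o/\nanother_long_name.o/\n";
        print_member_name( "/0              ", table );   /* first table entry  */
        print_member_name( "/27             ", table );   /* second table entry */
        print_member_name( "short.o/        ", table );   /* short inline name  */
        return 0;
    }
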
diff --git a/jam-files/engine/fileos2.c b/jam-files/engine/fileos2.c
new file mode 100644
index 000000000..af2373ea8
--- /dev/null
+++ b/jam-files/engine/fileos2.c
@@ -0,0 +1,138 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "filesys.h"
+# include "pathsys.h"
+
+/* note that we use "fileunix.c" when compiling with EMX on OS/2 */
+# if defined(OS_OS2) && !defined(__EMX__)
+
+# include <io.h>
+# include <dos.h>
+
+/*
+ * fileos2.c - scan directories and archives on OS/2
+ *
+ * External routines:
+ *
+ * file_dirscan() - scan a directory for files
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ * file_archscan() - scan an archive for files
+ *
+ * File_dirscan() and file_archscan() call back a caller provided function
+ * for each file found. A flag to this callback function lets file_dirscan()
+ * and file_archscan() indicate that a timestamp is being provided with the
+ * file. If file_dirscan() or file_archscan() do not provide the file's
+ * timestamp, interested parties may later call file_time().
+ *
+ * 07/10/95 (taylor) Findfirst() returns the first file on NT.
+ * 05/03/96 (seiwald) split apart into pathnt.c
+ * 09/22/00 (seiwald) handle \ and c:\ specially: don't add extra /
+ */
+
+/*
+ * file_dirscan() - scan a directory for files
+ */
+
+void
+file_dirscan(
+ char *dir,
+ scanback func,
+ void *closure )
+{
+ PATHNAME f;
+ string filespec[1];
+ long handle;
+ int ret;
+ struct _find_t finfo[1];
+
+ /* First enter directory itself */
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+
+ f.f_dir.ptr = dir;
+ f.f_dir.len = strlen(dir);
+
+ dir = *dir ? dir : ".";
+
+ /* Special case \ or d:\ : enter it */
+ string_copy( filespec, dir );
+
+ if ( f.f_dir.len == 1 && f.f_dir.ptr[0] == '\\' )
+ (*func)( closure, dir, 0 /* not stat()'ed */, (time_t)0 );
+ else if ( f.f_dir.len == 3 && f.f_dir.ptr[1] == ':' )
+ (*func)( closure, dir, 0 /* not stat()'ed */, (time_t)0 );
+ else
+ string_push_back( filespec, '/' );
+
+ string_push_back( filespec, '*' );
+
+ /* Now enter contents of directory */
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan directory %s\n", filespec->value );
+
+ /* Time info in dos find_t is not very useful. It consists */
+ /* of a separate date and time, and putting them together is */
+ /* not easy. So we leave that to a later stat() call. */
+
+ if ( !_dos_findfirst( filespec->value, _A_NORMAL|_A_RDONLY|_A_SUBDIR, finfo ) )
+ {
+ string filename[1];
+ string_new( filename );
+ do
+ {
+ f.f_base.ptr = finfo->name;
+ f.f_base.len = strlen( finfo->name );
+
+ string_truncate( filename, 0 );
+ path_build( &f, filename, 0 );
+ (*func)( closure, filename->value, 0 /* not stat()'ed */, (time_t)0 );
+ }
+ while ( !_dos_findnext( finfo ) );
+ string_free( filename );
+ }
+}
+
+/*
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ */
+
+int
+file_time(
+ char *filename,
+ time_t *time )
+{
+ /* This is called on OS2, not NT. */
+ /* NT fills in the time in the dirscan. */
+
+ struct stat statbuf;
+
+ if ( stat( filename, &statbuf ) < 0 )
+ return -1;
+
+ *time = statbuf.st_mtime;
+
+ return 0;
+}
+
+void
+file_archscan(
+ char *archive,
+ scanback func,
+ void *closure )
+{
+}
+
+# endif /* OS2 && !__EMX__ */
+
diff --git a/jam-files/engine/filesys.c b/jam-files/engine/filesys.c
new file mode 100644
index 000000000..eb62ed406
--- /dev/null
+++ b/jam-files/engine/filesys.c
@@ -0,0 +1,83 @@
+# include "jam.h"
+# include "pathsys.h"
+# include "strings.h"
+# include "newstr.h"
+# include "filesys.h"
+# include "lists.h"
+
+void file_build1( PATHNAME * f, string * file )
+{
+ if ( DEBUG_SEARCH )
+ {
+ printf("build file: ");
+ if ( f->f_root.len )
+ printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr );
+ if ( f->f_dir.len )
+ printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr );
+ if ( f->f_base.len )
+ printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr );
+ printf( "\n" );
+ }
+
+ /* Start with the grist. If the current grist isn't */
+ /* surrounded by <>'s, add them. */
+
+ if ( f->f_grist.len )
+ {
+ if ( f->f_grist.ptr[0] != '<' )
+ string_push_back( file, '<' );
+ string_append_range(
+ file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len );
+ if ( file->value[file->size - 1] != '>' )
+ string_push_back( file, '>' );
+ }
+}
+
+static struct hash * filecache_hash = 0;
+static file_info_t filecache_finfo;
+
+file_info_t * file_info(char * filename)
+{
+ file_info_t *finfo = &filecache_finfo;
+
+ if ( !filecache_hash )
+ filecache_hash = hashinit( sizeof( file_info_t ), "file_info" );
+
+ finfo->name = filename;
+ finfo->is_file = 0;
+ finfo->is_dir = 0;
+ finfo->size = 0;
+ finfo->time = 0;
+ finfo->files = 0;
+ if ( hashenter( filecache_hash, (HASHDATA**)&finfo ) )
+ {
+ /* printf( "file_info: %s\n", filename ); */
+ finfo->name = newstr( finfo->name );
+ }
+
+ return finfo;
+}
+
+static LIST * files_to_remove = L0;
+
+static void remove_files_atexit(void)
+{
+    /* We pop the front element in case this exit function is called
+       more than once. */
+ while ( files_to_remove )
+ {
+ remove( files_to_remove->string );
+ files_to_remove = list_pop_front( files_to_remove );
+ }
+}
+
+void file_done()
+{
+ remove_files_atexit();
+ hashdone( filecache_hash );
+}
+
+void file_remove_atexit( const char * path )
+{
+ files_to_remove = list_new( files_to_remove, newstr((char*)path) );
+}
diff --git a/jam-files/engine/filesys.h b/jam-files/engine/filesys.h
new file mode 100644
index 000000000..efc081d12
--- /dev/null
+++ b/jam-files/engine/filesys.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filesys.h - OS specific file routines
+ */
+
+#ifndef FILESYS_DWA20011025_H
+# define FILESYS_DWA20011025_H
+
+# include "pathsys.h"
+#include "hash.h"
+#include "lists.h"
+
+typedef void (*scanback)( void *closure, char *file, int found, time_t t );
+
+void file_dirscan( char *dir, scanback func, void *closure );
+void file_archscan( char *arch, scanback func, void *closure );
+
+int file_time( char *filename, time_t *time );
+
+void file_build1(PATHNAME *f, string* file) ;
+int file_is_file(char* filename);
+int file_mkdir(char *pathname);
+
+typedef struct file_info_t file_info_t ;
+struct file_info_t
+{
+ char * name;
+ short is_file;
+ short is_dir;
+ unsigned long size;
+ time_t time;
+ LIST * files;
+};
+
+
+/* Returns a pointer to the information record for file 'filename', creating
+ * the record if necessary. If created, the structure is default initialized.
+ */
+file_info_t * file_info( char * filename );
+
+/* Returns information about a file, queries the OS if needed. */
+file_info_t * file_query( char * filename );
+
+void file_done();
+
+/* Marks a path/file to be removed when jam exits. */
+void file_remove_atexit( const char * path );
+
+#endif
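
The scanback typedef above is the contract shared by file_dirscan() and file_archscan(): the callback receives each file name, a flag saying whether a timestamp was already obtained, and the timestamp itself; when the flag is zero, the scanner headers say interested parties may call file_time() later. A minimal sketch of such a callback (count_entry(), the counter closure and the main() driver are illustrative, not jam code):

    #include <stdio.h>
    #include <time.h>

    /* A caller-provided scanback matching the typedef above: 'found' says
     * whether 't' already carries a valid timestamp. */
    static void count_entry( void * closure, char * file, int found, time_t t )
    {
        int * n = (int *)closure;
        ++*n;
        if ( found )
            printf( "%s (timestamp %ld)\n", file, (long)t );
        else
            printf( "%s (timestamp not provided)\n", file );
    }

    int main( void )
    {
        int n = 0;
        count_entry( &n, "Jamroot", 1, (time_t)0 );
        printf( "%d entries\n", n );
        return 0;
    }

Against the API declared above, the callback would be handed to a scanner as file_dirscan( ".", count_entry, &n ).
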
diff --git a/jam-files/engine/fileunix.c b/jam-files/engine/fileunix.c
new file mode 100644
index 000000000..680c3f539
--- /dev/null
+++ b/jam-files/engine/fileunix.c
@@ -0,0 +1,501 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "filesys.h"
+# include "strings.h"
+# include "pathsys.h"
+# include "newstr.h"
+# include <stdio.h>
+# include <sys/stat.h>
+
+#if defined(sun) || defined(__sun) || defined(linux)
+# include <unistd.h> /* needed for read and close prototype */
+#endif
+
+# ifdef USE_FILEUNIX
+
+#if defined(sun) || defined(__sun)
+# include <unistd.h> /* needed for read and close prototype */
+#endif
+
+# if defined( OS_SEQUENT ) || \
+ defined( OS_DGUX ) || \
+ defined( OS_SCO ) || \
+ defined( OS_ISC )
+# define PORTAR 1
+# endif
+
+# ifdef __EMX__
+# include <sys/types.h>
+# include <sys/stat.h>
+# endif
+
+# if defined( OS_RHAPSODY ) || \
+ defined( OS_MACOSX ) || \
+ defined( OS_NEXT )
+/* need unistd for rhapsody's proper lseek */
+# include <sys/dir.h>
+# include <unistd.h>
+# define STRUCT_DIRENT struct direct
+# else
+# include <dirent.h>
+# define STRUCT_DIRENT struct dirent
+# endif
+
+# ifdef OS_COHERENT
+# include <arcoff.h>
+# define HAVE_AR
+# endif
+
+# if defined( OS_MVS ) || \
+ defined( OS_INTERIX )
+
+#define ARMAG "!<arch>\n"
+#define SARMAG 8
+#define ARFMAG "`\n"
+
+struct ar_hdr /* archive file member header - printable ascii */
+{
+ char ar_name[16]; /* file member name - `/' terminated */
+ char ar_date[12]; /* file member date - decimal */
+ char ar_uid[6]; /* file member user id - decimal */
+ char ar_gid[6]; /* file member group id - decimal */
+ char ar_mode[8]; /* file member mode - octal */
+ char ar_size[10]; /* file member size - decimal */
+ char ar_fmag[2]; /* ARFMAG - string to end header */
+};
+
+# define HAVE_AR
+# endif
+
+# if defined( OS_QNX ) || \
+ defined( OS_BEOS ) || \
+ defined( OS_MPEIX )
+# define NO_AR
+# define HAVE_AR
+# endif
+
+# ifndef HAVE_AR
+
+# ifdef OS_AIX
+/* Define those for AIX to get the definitions for both the small and the
+ * big variant of the archive file format. */
+# define __AR_SMALL__
+# define __AR_BIG__
+# endif
+
+# include <ar.h>
+# endif
+
+/*
+ * fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS
+ *
+ * External routines:
+ *
+ * file_dirscan() - scan a directory for files
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ * file_archscan() - scan an archive for files
+ *
+ * File_dirscan() and file_archscan() call back a caller provided function
+ * for each file found. A flag to this callback function lets file_dirscan()
+ * and file_archscan() indicate that a timestamp is being provided with the
+ * file. If file_dirscan() or file_archscan() do not provide the file's
+ * timestamp, interested parties may later call file_time().
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 12/19/94 (mikem) - solaris string table insanity support
+ * 02/14/95 (seiwald) - parse and build /xxx properly
+ * 05/03/96 (seiwald) - split into pathunix.c
+ * 11/21/96 (peterk) - BEOS does not have Unix-style archives
+ */
+
+
+/*
+ * file_dirscan() - scan a directory for files.
+ */
+
+void file_dirscan( char * dir, scanback func, void * closure )
+{
+ PROFILE_ENTER( FILE_DIRSCAN );
+
+ file_info_t * d = 0;
+
+ d = file_query( dir );
+
+ if ( !d || !d->is_dir )
+ {
+ PROFILE_EXIT( FILE_DIRSCAN );
+ return;
+ }
+
+ if ( ! d->files )
+ {
+ LIST* files = L0;
+ PATHNAME f;
+ DIR *dd;
+ STRUCT_DIRENT *dirent;
+ string filename[1];
+
+ /* First enter directory itself */
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+
+ f.f_dir.ptr = dir;
+ f.f_dir.len = strlen(dir);
+
+ dir = *dir ? dir : ".";
+
+ /* Now enter contents of directory. */
+
+ if ( !( dd = opendir( dir ) ) )
+ {
+ PROFILE_EXIT( FILE_DIRSCAN );
+ return;
+ }
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan directory %s\n", dir );
+
+ string_new( filename );
+ while ( ( dirent = readdir( dd ) ) )
+ {
+ # ifdef old_sinix
+ /* Broken structure definition on sinix. */
+ f.f_base.ptr = dirent->d_name - 2;
+ # else
+ f.f_base.ptr = dirent->d_name;
+ # endif
+ f.f_base.len = strlen( f.f_base.ptr );
+
+ string_truncate( filename, 0 );
+ path_build( &f, filename, 0 );
+
+ files = list_new( files, newstr(filename->value) );
+ file_query( filename->value );
+ }
+ string_free( filename );
+
+ closedir( dd );
+
+ d->files = files;
+ }
+
+ /* Special case / : enter it */
+ {
+ unsigned long len = strlen(d->name);
+ if ( ( len == 1 ) && ( d->name[0] == '/' ) )
+ (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
+ }
+
+ /* Now enter contents of directory */
+ if ( d->files )
+ {
+ LIST * files = d->files;
+ while ( files )
+ {
+ file_info_t * ff = file_info( files->string );
+ (*func)( closure, ff->name, 1 /* stat()'ed */, ff->time );
+ files = list_next( files );
+ }
+ }
+
+ PROFILE_EXIT( FILE_DIRSCAN );
+}
+
+
+file_info_t * file_query( char * filename )
+{
+ file_info_t * ff = file_info( filename );
+ if ( ! ff->time )
+ {
+ struct stat statbuf;
+
+ if ( stat( *filename ? filename : ".", &statbuf ) < 0 )
+ return 0;
+
+ ff->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
+ ff->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
+ ff->size = statbuf.st_size;
+ ff->time = statbuf.st_mtime ? statbuf.st_mtime : 1;
+ }
+ return ff;
+}
+
+/*
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ */
+
+int
+file_time(
+ char *filename,
+ time_t *time )
+{
+ file_info_t * ff = file_query( filename );
+ if ( !ff ) return -1;
+ *time = ff->time;
+ return 0;
+}
+
+int file_is_file(char* filename)
+{
+ file_info_t * ff = file_query( filename );
+ if ( !ff ) return -1;
+ return ff->is_file;
+}
+
+int file_mkdir(char* pathname)
+{
+ return mkdir(pathname, 0766);
+}
+
+/*
+ * file_archscan() - scan an archive for files
+ */
+
+# ifndef AIAMAG /* God-fearing UNIX */
+
+# define SARFMAG 2
+# define SARHDR sizeof( struct ar_hdr )
+
+void
+file_archscan(
+ char *archive,
+ scanback func,
+ void *closure )
+{
+# ifndef NO_AR
+ struct ar_hdr ar_hdr;
+ char buf[ MAXJPATH ];
+ long offset;
+ char *string_table = 0;
+ int fd;
+
+ if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 )
+ return;
+
+ if ( read( fd, buf, SARMAG ) != SARMAG ||
+ strncmp( ARMAG, buf, SARMAG ) )
+ {
+ close( fd );
+ return;
+ }
+
+ offset = SARMAG;
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR )
+ && !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG )
+#ifdef ARFZMAG
+ /* OSF also has a compressed format */
+ && memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG )
+#endif
+ ) )
+ {
+ char lar_name_[257];
+ char * lar_name = lar_name_ + 1;
+ long lar_date;
+ long lar_size;
+ long lar_offset;
+ char * c;
+ char * src;
+ char * dest;
+
+ strncpy( lar_name, ar_hdr.ar_name, sizeof(ar_hdr.ar_name) );
+
+ sscanf( ar_hdr.ar_date, "%ld", &lar_date );
+ sscanf( ar_hdr.ar_size, "%ld", &lar_size );
+
+ if (ar_hdr.ar_name[0] == '/')
+ {
+ if (ar_hdr.ar_name[1] == '/')
+ {
+ /* this is the "string table" entry of the symbol table,
+ ** which holds strings of filenames that are longer than
+                ** 15 characters (i.e. they do not fit into ar_name).
+ */
+
+ string_table = (char *)BJAM_MALLOC_ATOMIC(lar_size);
+ lseek(fd, offset + SARHDR, 0);
+ if (read(fd, string_table, lar_size) != lar_size)
+ printf("error reading string table\n");
+ }
+ else if (string_table && ar_hdr.ar_name[1] != ' ')
+ {
+ /* Long filenames are recognized by "/nnnn" where nnnn is
+ ** the offset of the string in the string table represented
+ ** in ASCII decimals.
+ */
+ dest = lar_name;
+ lar_offset = atoi(lar_name + 1);
+ src = &string_table[lar_offset];
+ while (*src != '/')
+ *dest++ = *src++;
+ *dest = '/';
+ }
+ }
+
+ c = lar_name - 1;
+ while ( ( *++c != ' ' ) && ( *c != '/' ) ) ;
+ *c = '\0';
+
+ if ( DEBUG_BINDSCAN )
+ printf( "archive name %s found\n", lar_name );
+
+ sprintf( buf, "%s(%s)", archive, lar_name );
+
+ (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
+
+ offset += SARHDR + ( ( lar_size + 1 ) & ~1 );
+ lseek( fd, offset, 0 );
+ }
+
+ if (string_table)
+ BJAM_FREE(string_table);
+
+ close( fd );
+
+# endif /* NO_AR */
+
+}
+
+# else /* AIAMAG - RS6000 AIX */
+
+static void file_archscan_small(
+ int fd, char const *archive, scanback func, void *closure)
+{
+ struct fl_hdr fl_hdr;
+
+ struct {
+ struct ar_hdr hdr;
+ char pad[ 256 ];
+ } ar_hdr ;
+
+ char buf[ MAXJPATH ];
+ long offset;
+
+ if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ)
+ return;
+
+ sscanf( fl_hdr.fl_fstmoff, "%ld", &offset );
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( ( offset > 0 )
+ && ( lseek( fd, offset, 0 ) >= 0 )
+ && ( read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) )
+ {
+ long lar_date;
+ int lar_namlen;
+
+ sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
+ sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date );
+ sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset );
+
+ if ( !lar_namlen )
+ continue;
+
+ ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
+
+ sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
+
+ (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
+ }
+}
+
+/* Check for OS version which supports the big variant. */
+#ifdef AR_HSZ_BIG
+
+static void file_archscan_big(
+ int fd, char const *archive, scanback func, void *closure)
+{
+ struct fl_hdr_big fl_hdr;
+
+ struct {
+ struct ar_hdr_big hdr;
+ char pad[ 256 ];
+ } ar_hdr ;
+
+ char buf[ MAXJPATH ];
+ long long offset;
+
+ if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG) != FL_HSZ_BIG)
+ return;
+
+ sscanf( fl_hdr.fl_fstmoff, "%lld", &offset );
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( ( offset > 0 )
+ && ( lseek( fd, offset, 0 ) >= 0 )
+ && ( read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) )
+ {
+ long lar_date;
+ int lar_namlen;
+
+ sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
+ sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date );
+ sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset );
+
+ if ( !lar_namlen )
+ continue;
+
+ ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
+
+ sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
+
+ (*func)( closure, buf, 1 /* time valid */, (time_t)lar_date );
+ }
+
+}
+
+#endif /* AR_HSZ_BIG */
+
+void file_archscan(char *archive, scanback func, void *closure)
+{
+ int fd;
+ char fl_magic[SAIAMAG];
+
+ if (( fd = open(archive, O_RDONLY, 0)) < 0)
+ return;
+
+ if (read( fd, fl_magic, SAIAMAG) != SAIAMAG
+ || lseek(fd, 0, SEEK_SET) == -1)
+ {
+ close(fd);
+ return;
+ }
+
+ if (strncmp(AIAMAG, fl_magic, SAIAMAG) == 0)
+ {
+ /* read small variant */
+ file_archscan_small(fd, archive, func, closure);
+ }
+#ifdef AR_HSZ_BIG
+ else if (strncmp(AIAMAGBIG, fl_magic, SAIAMAG) == 0)
+ {
+ /* read big variant */
+ file_archscan_big(fd, archive, func, closure);
+ }
+#endif
+
+ close( fd );
+}
+
+# endif /* AIAMAG - RS6000 AIX */
+
+# endif /* USE_FILEUNIX */
diff --git a/jam-files/engine/filevms.c b/jam-files/engine/filevms.c
new file mode 100644
index 000000000..d2ab2047f
--- /dev/null
+++ b/jam-files/engine/filevms.c
@@ -0,0 +1,327 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "filesys.h"
+# include "pathsys.h"
+
+# ifdef OS_VMS
+
+/*
+ * filevms.c - scan directories and libraries on VMS
+ *
+ * External routines:
+ *
+ * file_dirscan() - scan a directory for files
+ * file_time() - get timestamp of file, if not done by file_dirscan()
+ * file_archscan() - scan an archive for files
+ *
+ * File_dirscan() and file_archscan() call back a caller provided function
+ * for each file found. A flag to this callback function lets file_dirscan()
+ * and file_archscan() indicate that a timestamp is being provided with the
+ * file. If file_dirscan() or file_archscan() do not provide the file's
+ * timestamp, interested parties may later call file_time().
+ *
+ * 02/09/95 (seiwald) - bungled R=[xxx] - was using directory length!
+ * 05/03/96 (seiwald) - split into pathvms.c
+ */
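+
+/* A minimal sketch of the callback contract described above (not part of the
+ * original Jam sources): file_dirscan() hands every entry it finds to a
+ * caller-provided "scanback" together with a flag saying whether a valid
+ * timestamp is being provided.  The guard DIRSCAN_SKETCH and the names
+ * print_entry and dirscan_sketch are made up for illustration.
+ */
+#ifdef DIRSCAN_SKETCH
+static void print_entry( void * closure, char * file, int time_valid, time_t t )
+{
+    int * counter = (int *)closure;
+    (void)t;  /* the timestamp itself is not used in this sketch */
+    ++*counter;
+    printf( "%s%s\n", file, time_valid ? "" : " (timestamp unknown)" );
+}
+
+static void dirscan_sketch( char * dir )
+{
+    int count = 0;
+    file_dirscan( dir, print_entry, &count );
+    printf( "%d entries in %s\n", count, dir );
+}
+#endif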
+
+# include <rms.h>
+# include <iodef.h>
+# include <ssdef.h>
+# include <string.h>
+# include <stdlib.h>
+# include <stdio.h>
+# include <descrip.h>
+
+#include <lbrdef.h>
+#include <credef.h>
+#include <mhddef.h>
+#include <lhidef.h>
+#include <lib$routines.h>
+#include <starlet.h>
+
+/* Supply missing prototypes for lbr$-routines*/
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+int lbr$set_module(
+ void **,
+ unsigned long *,
+ struct dsc$descriptor_s *,
+ unsigned short *,
+ void * );
+
+int lbr$open( void **,
+ struct dsc$descriptor_s *,
+ void *,
+ void *,
+ void *,
+ void *,
+ void * );
+
+int lbr$ini_control(
+ void **,
+ unsigned long *,
+ unsigned long *,
+ void * );
+
+int lbr$get_index(
+ void **,
+ unsigned long *,
+ int (*func)( struct dsc$descriptor_s *, unsigned long *),
+ void * );
+
+int lbr$close(
+ void ** );
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+static void
+file_cvttime(
+ unsigned int *curtime,
+ time_t *unixtime )
+{
+ static const size_t divisor = 10000000;
+ static unsigned int bastim[2] = { 0x4BEB4000, 0x007C9567 }; /* 1/1/1970 */
+ int delta[2], remainder;
+
+ lib$subx( curtime, bastim, delta );
+ lib$ediv( &divisor, delta, unixtime, &remainder );
+}
+
+# define DEFAULT_FILE_SPECIFICATION "[]*.*;0"
+
+# define min( a,b ) ((a)<(b)?(a):(b))
+
+void
+file_dirscan(
+ char *dir,
+ scanback func,
+ void *closure )
+{
+
+ struct FAB xfab;
+ struct NAM xnam;
+ struct XABDAT xab;
+ char esa[256];
+ char filename[256];
+ string filename2[1];
+ char dirname[256];
+ register int status;
+ PATHNAME f;
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+
+ f.f_root.ptr = dir;
+ f.f_root.len = strlen( dir );
+
+ /* get the input file specification
+ */
+ xnam = cc$rms_nam;
+ xnam.nam$l_esa = esa;
+ xnam.nam$b_ess = sizeof( esa ) - 1;
+ xnam.nam$l_rsa = filename;
+ xnam.nam$b_rss = min( sizeof( filename ) - 1, NAM$C_MAXRSS );
+
+ xab = cc$rms_xabdat; /* initialize extended attributes */
+ xab.xab$b_cod = XAB$C_DAT; /* ask for date */
+ xab.xab$l_nxt = NULL; /* terminate XAB chain */
+
+ xfab = cc$rms_fab;
+ xfab.fab$l_dna = DEFAULT_FILE_SPECIFICATION;
+ xfab.fab$b_dns = sizeof( DEFAULT_FILE_SPECIFICATION ) - 1;
+ xfab.fab$l_fop = FAB$M_NAM;
+ xfab.fab$l_fna = dir; /* address of file name */
+ xfab.fab$b_fns = strlen( dir ); /* length of file name */
+ xfab.fab$l_nam = &xnam; /* address of NAB block */
+ xfab.fab$l_xab = (char *)&xab; /* address of XAB block */
+
+
+ status = sys$parse( &xfab );
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan directory %s\n", dir );
+
+ if ( !( status & 1 ) )
+ return;
+
+
+
+ /* Add bogus directory for [000000] */
+
+ if ( !strcmp( dir, "[000000]" ) )
+ {
+ (*func)( closure, "[000000]", 1 /* time valid */, 1 /* old but true */ );
+ }
+
+ /* Add bogus directory for [] */
+
+ if ( !strcmp( dir, "[]" ) )
+ {
+ (*func)( closure, "[]", 1 /* time valid */, 1 /* old but true */ );
+ (*func)( closure, "[-]", 1 /* time valid */, 1 /* old but true */ );
+ }
+
+ string_new( filename2 );
+ while ( (status = sys$search( &xfab )) & 1 )
+ {
+ char *s;
+ time_t time;
+
+ /* "I think that might work" - eml */
+
+ sys$open( &xfab );
+ sys$close( &xfab );
+
+ file_cvttime( (unsigned int *)&xab.xab$q_rdt, &time );
+
+ filename[xnam.nam$b_rsl] = '\0';
+
+ /* What we do with the name depends on the suffix: */
+ /* .dir is a directory */
+ /* .xxx is a file with a suffix */
+ /* . is no suffix at all */
+
+ if ( xnam.nam$b_type == 4 && !strncmp( xnam.nam$l_type, ".DIR", 4 ) )
+ {
+ /* directory */
+ sprintf( dirname, "[.%.*s]", xnam.nam$b_name, xnam.nam$l_name );
+ f.f_dir.ptr = dirname;
+ f.f_dir.len = strlen( dirname );
+ f.f_base.ptr = 0;
+ f.f_base.len = 0;
+ f.f_suffix.ptr = 0;
+ f.f_suffix.len = 0;
+ }
+ else
+ {
+ /* normal file with a suffix */
+ f.f_dir.ptr = 0;
+ f.f_dir.len = 0;
+ f.f_base.ptr = xnam.nam$l_name;
+ f.f_base.len = xnam.nam$b_name;
+ f.f_suffix.ptr = xnam.nam$l_type;
+ f.f_suffix.len = xnam.nam$b_type;
+ }
+
+ string_truncate( filename2, 0 );
+ path_build( &f, filename2, 0 );
+
+ /*
+ if ( DEBUG_SEARCH )
+ printf("root '%s' base %.*s suf %.*s = %s\n",
+ dir,
+ xnam.nam$b_name, xnam.nam$l_name,
+ xnam.nam$b_type, xnam.nam$l_type,
+ filename2 );
+ */
+
+ (*func)( closure, filename2->value, 1 /* time valid */, time );
+ }
+ string_free( filename2 );
+}
+
+int
+file_time(
+ char *filename,
+ time_t *time )
+{
+    /* This should never be called, as all files are */
+    /* timestamped in file_dirscan() and file_archscan() */
+ return -1;
+}
+
+static char *VMS_archive = 0;
+static scanback VMS_func;
+static void *VMS_closure;
+static void *context;
+
+static int
+file_archmember(
+ struct dsc$descriptor_s *module,
+ unsigned long *rfa )
+{
+ static struct dsc$descriptor_s bufdsc =
+ {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
+
+ struct mhddef *mhd;
+ char filename[128];
+ char buf[ MAXJPATH ];
+
+ int status;
+ time_t library_date;
+
+ register int i;
+ register char *p;
+
+ bufdsc.dsc$a_pointer = filename;
+ bufdsc.dsc$w_length = sizeof( filename );
+ status = lbr$set_module( &context, rfa, &bufdsc,
+ &bufdsc.dsc$w_length, NULL );
+
+ if ( !(status & 1) )
+ return ( 1 );
+
+ mhd = (struct mhddef *)filename;
+
+ file_cvttime( &mhd->mhd$l_datim, &library_date );
+
+ for ( i = 0, p = module->dsc$a_pointer; i < module->dsc$w_length; ++i, ++p )
+ filename[ i ] = *p;
+
+ filename[ i ] = '\0';
+
+ sprintf( buf, "%s(%s.obj)", VMS_archive, filename );
+
+ (*VMS_func)( VMS_closure, buf, 1 /* time valid */, (time_t)library_date );
+
+ return ( 1 );
+}
+
+
+void file_archscan( char * archive, scanback func, void * closure )
+{
+ static struct dsc$descriptor_s library =
+ {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
+
+ unsigned long lfunc = LBR$C_READ;
+ unsigned long typ = LBR$C_TYP_UNK;
+ unsigned long index = 1;
+
+ register int status;
+
+ VMS_archive = archive;
+ VMS_func = func;
+ VMS_closure = closure;
+
+ status = lbr$ini_control( &context, &lfunc, &typ, NULL );
+ if ( !( status & 1 ) )
+ return;
+
+ library.dsc$a_pointer = archive;
+ library.dsc$w_length = strlen( archive );
+
+ status = lbr$open( &context, &library, NULL, NULL, NULL, NULL, NULL );
+ if ( !( status & 1 ) )
+ return;
+
+ (void) lbr$get_index( &context, &index, file_archmember, NULL );
+
+ (void) lbr$close( &context );
+}
+
+# endif /* VMS */
diff --git a/jam-files/engine/frames.c b/jam-files/engine/frames.c
new file mode 100644
index 000000000..84889f09e
--- /dev/null
+++ b/jam-files/engine/frames.c
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "frames.h"
+# include "lists.h"
+
+void frame_init( FRAME* frame )
+{
+ frame->prev = 0;
+ lol_init(frame->args);
+ frame->module = root_module();
+ frame->rulename = "module scope";
+ frame->procedure = 0;
+}
+
+void frame_free( FRAME* frame )
+{
+ lol_free( frame->args );
+}
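+
+/* A minimal sketch (not part of the original Jam sources) of the usual FRAME
+ * life cycle when invoking a rule from C, mirroring what headers() does in
+ * headers.c: init the frame, add argument lists, evaluate, free.  The guard
+ * FRAME_USAGE_SKETCH, the helper name and the rule name "SOME-RULE" are made
+ * up for illustration; evaluate_rule() and newstr() come from compile.h and
+ * newstr.h.
+ */
+#ifdef FRAME_USAGE_SKETCH
+# include "compile.h"    /* evaluate_rule() */
+# include "newstr.h"     /* newstr() */
+
+static void frame_usage_sketch( char * target_name )
+{
+    FRAME frame[ 1 ];
+    frame_init( frame );
+    lol_add( frame->args, list_new( L0, newstr( target_name ) ) );  /* $(1) */
+    list_free( evaluate_rule( "SOME-RULE", frame ) );
+    frame_free( frame );
+}
+#endif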
diff --git a/jam-files/engine/frames.h b/jam-files/engine/frames.h
new file mode 100644
index 000000000..693d77fa0
--- /dev/null
+++ b/jam-files/engine/frames.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+#ifndef FRAMES_DWA20011021_H
+#define FRAMES_DWA20011021_H
+
+#include "lists.h"
+#include "modules.h"
+
+typedef struct _PARSE PARSE;
+typedef struct frame FRAME;
+
+struct frame
+{
+ FRAME * prev;
+ /* The nearest enclosing frame for which module->user_module is true. */
+ FRAME * prev_user;
+ LOL args[ 1 ];
+ module_t * module;
+ PARSE * procedure;
+ char * rulename;
+};
+
+
+/* When a call into Python is in progress, this variable points to the bjam
+ * frame that was current at the moment of the call. When the call completes,
+ * the variable is undefined. Further, if Jam calls Python which calls Jam and
+ * so on, this variable only keeps the most recent Jam frame.
+ */
+extern struct frame * frame_before_python_call;
+
+void frame_init( FRAME * ); /* implemented in frames.c */
+void frame_free( FRAME * ); /* implemented in frames.c */
+
+#endif
diff --git a/jam-files/engine/glob.c b/jam-files/engine/glob.c
new file mode 100644
index 000000000..527d6c808
--- /dev/null
+++ b/jam-files/engine/glob.c
@@ -0,0 +1,152 @@
+/*
+ * Copyright 1994 Christopher Seiwald. All rights reserved.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * glob.c - match a string against a simple pattern
+ *
+ * Understands the following patterns:
+ *
+ * * any number of characters
+ * ? any single character
+ * [a-z] any single character in the range a-z
+ * [^a-z] any single character not in the range a-z
+ * \x match x
+ *
+ * External functions:
+ *
+ * glob() - match a string against a simple pattern
+ *
+ * Internal functions:
+ *
+ * globchars() - build a bitlist to check for character group match
+ */
+
+# include "jam.h"
+
+# define CHECK_BIT( tab, bit ) ( tab[ (bit)/8 ] & (1<<( (bit)%8 )) )
+# define BITLISTSIZE 16 /* bytes used for [chars] in compiled expr */
+
+static void globchars( char * s, char * e, char * b );
+
+
+/*
+ * glob() - match a string against a simple pattern.
+ */
+
+int glob( char * c, char * s )
+{
+ char bitlist[ BITLISTSIZE ];
+ char * here;
+
+ for ( ; ; )
+ switch ( *c++ )
+ {
+ case '\0':
+ return *s ? -1 : 0;
+
+ case '?':
+ if ( !*s++ )
+ return 1;
+ break;
+
+ case '[':
+ /* Scan for matching ]. */
+
+ here = c;
+ do if ( !*c++ ) return 1;
+ while ( ( here == c ) || ( *c != ']' ) );
+ ++c;
+
+ /* Build character class bitlist. */
+
+ globchars( here, c, bitlist );
+
+ if ( !CHECK_BIT( bitlist, *(unsigned char *)s ) )
+ return 1;
+ ++s;
+ break;
+
+ case '*':
+ here = s;
+
+ while ( *s )
+ ++s;
+
+ /* Try to match the rest of the pattern in a recursive */
+ /* call. If the match fails we'll back up chars, retrying. */
+
+ while ( s != here )
+ {
+ int r;
+
+ /* A fast path for the last token in a pattern. */
+ r = *c ? glob( c, s ) : *s ? -1 : 0;
+
+ if ( !r )
+ return 0;
+ if ( r < 0 )
+ return 1;
+ --s;
+ }
+ break;
+
+ case '\\':
+ /* Force literal match of next char. */
+ if ( !*c || ( *s++ != *c++ ) )
+ return 1;
+ break;
+
+ default:
+ if ( *s++ != c[ -1 ] )
+ return 1;
+ break;
+ }
+}
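+
+/* A few illustrative assertions (not part of the original Jam sources) showing
+ * the glob() contract as implemented above: 0 means the string matches the
+ * pattern, non-zero means it does not.  The guard and helper name are made up
+ * for illustration.
+ */
+#ifdef GLOB_SELF_TEST
+# include <assert.h>
+
+static void glob_self_test( void )
+{
+    assert( glob( "*.c",       "glob.c" ) == 0 );  /* '*' spans any run of chars */
+    assert( glob( "gl?b.c",    "glob.c" ) == 0 );  /* '?' matches a single char  */
+    assert( glob( "[a-g]*.c",  "glob.c" ) == 0 );  /* character class            */
+    assert( glob( "[^a-g]*.c", "glob.c" ) != 0 );  /* negated character class    */
+    assert( glob( "\\*.c",     "*.c"    ) == 0 );  /* '\\' forces a literal '*'  */
+    assert( glob( "*.h",       "glob.c" ) != 0 );  /* no match                   */
+}
+#endif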
+
+
+/*
+ * globchars() - build a bitlist to check for character group match.
+ */
+
+static void globchars( char * s, char * e, char * b )
+{
+ int neg = 0;
+
+ memset( b, '\0', BITLISTSIZE );
+
+ if ( *s == '^' )
+ {
+ ++neg;
+ ++s;
+ }
+
+ while ( s < e )
+ {
+ int c;
+
+ if ( ( s + 2 < e ) && ( s[1] == '-' ) )
+ {
+ for ( c = s[0]; c <= s[2]; ++c )
+ b[ c/8 ] |= ( 1 << ( c % 8 ) );
+ s += 3;
+ }
+ else
+ {
+ c = *s++;
+ b[ c/8 ] |= ( 1 << ( c % 8 ) );
+ }
+ }
+
+ if ( neg )
+ {
+ int i;
+ for ( i = 0; i < BITLISTSIZE; ++i )
+ b[ i ] ^= 0377;
+ }
+
+ /* Do not include \0 in either $[chars] or $[^chars]. */
+ b[0] &= 0376;
+}
diff --git a/jam-files/engine/hash.c b/jam-files/engine/hash.c
new file mode 100644
index 000000000..fbd1a8993
--- /dev/null
+++ b/jam-files/engine/hash.c
@@ -0,0 +1,459 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "hash.h"
+# include "compile.h"
+# include <assert.h>
+
+/*
+ * hash.c - simple in-memory hashing routines
+ *
+ * External routines:
+ *
+ * hashinit() - initialize a hash table, returning a handle
+ * hashitem() - find a record in the table, and optionally enter a new one
+ * hashdone() - free a hash table, given its handle
+ *
+ * Internal routines:
+ *
+ * hashrehash() - resize and rebuild hp->tab, the hash table
+ *
+ * 4/29/93 - ensure ITEM's are aligned
+ */
+
+/* */
+#define HASH_DEBUG_PROFILE 1
+/* */
+
+char *hashsccssid="@(#)hash.c 1.14 () 6/20/88";
+
+/* Header attached to all data items entered into a hash table. */
+
+struct hashhdr
+{
+ struct item * next;
+ unsigned int keyval; /* for quick comparisons */
+};
+
+/* This structure overlays the one handed to hashenter(). Its actual size is
+ * given to hashinit().
+ */
+
+struct hashdata
+{
+ char * key;
+ /* rest of user data */
+};
+
+typedef struct item
+{
+ struct hashhdr hdr;
+ struct hashdata data;
+} ITEM ;
+
+# define MAX_LISTS 32
+
+struct hash
+{
+ /*
+ * the hash table, just an array of item pointers
+ */
+ struct {
+ int nel;
+ ITEM **base;
+ } tab;
+
+ int bloat; /* tab.nel / items.nel */
+ int inel; /* initial number of elements */
+
+ /*
+ * the array of records, maintained by these routines
+ * essentially a microallocator
+ */
+ struct {
+ int more; /* how many more ITEMs fit in lists[ list ] */
+ ITEM *free; /* free list of items */
+ char *next; /* where to put more ITEMs in lists[ list ] */
+ int datalen; /* length of records in this hash table */
+ int size; /* sizeof( ITEM ) + aligned datalen */
+ int nel; /* total ITEMs held by all lists[] */
+ int list; /* index into lists[] */
+
+ struct {
+ int nel; /* total ITEMs held by this list */
+ char *base; /* base of ITEMs array */
+ } lists[ MAX_LISTS ];
+ } items;
+
+ char * name; /* just for hashstats() */
+};
+
+static void hashrehash( struct hash *hp );
+static void hashstat( struct hash *hp );
+static void * hash_mem_alloc(size_t datalen, size_t size);
+static void hash_mem_free(size_t datalen, void * data);
+#ifdef OPT_BOEHM_GC
+static void hash_mem_finalizer(char * key, struct hash * hp);
+#endif
+
+static unsigned int jenkins_one_at_a_time_hash(const unsigned char *key)
+{
+ unsigned int hash = 0;
+
+ while ( *key )
+ {
+ hash += *key++;
+ hash += (hash << 10);
+ hash ^= (hash >> 6);
+ }
+ hash += (hash << 3);
+ hash ^= (hash >> 11);
+ hash += (hash << 15);
+
+ return hash;
+}
+
+/*
+static unsigned int knuth_hash(const unsigned char *key)
+{
+ unsigned int keyval = *key;
+ while ( *key )
+ keyval = keyval * 2147059363 + *key++;
+ return keyval;
+}
+*/
+
+static unsigned int hash_keyval( const char * key_ )
+{
+ /*
+ return knuth_hash((const unsigned char *)key_);
+ */
+ return jenkins_one_at_a_time_hash((const unsigned char *)key_);
+}
+
+#define hash_bucket(hp,keyval) ((hp)->tab.base + ( (keyval) % (hp)->tab.nel ))
+
+/* Find the hash item for the given data. Returns a pointer to the item and,
+   if the 'previous' argument is non-zero, stores there a pointer to the item
+   preceding the found one in its bucket chain. If the found item is the first
+   in its bucket, zero is stored as the previous item instead.
+*/
+static ITEM * hash_search(
+ struct hash *hp,
+ unsigned int keyval,
+ const char * keydata,
+ ITEM * * previous )
+{
+ ITEM * i = *hash_bucket(hp,keyval);
+ ITEM * p = 0;
+
+ for ( ; i; i = i->hdr.next )
+ {
+ if ( ( keyval == i->hdr.keyval ) &&
+ !strcmp( i->data.key, keydata ) )
+ {
+ if (previous)
+ {
+ *previous = p;
+ }
+ return i;
+ }
+ p = i;
+ }
+
+ return 0;
+}
+
+/*
+ * hash_free() - remove the given item from the table if it's there.
+ * Returns 1 if found, 0 otherwise.
+ *
+ * NOTE: 2nd argument is HASHDATA*, not HASHDATA** as elsewhere.
+ */
+int
+hash_free(
+ register struct hash *hp,
+ HASHDATA *data)
+{
+ ITEM * i = 0;
+ ITEM * prev = 0;
+ unsigned int keyval = hash_keyval(data->key);
+
+ i = hash_search( hp, keyval, data->key, &prev );
+ if (i)
+ {
+ /* mark it free so we skip it during enumeration */
+ i->data.key = 0;
+ /* unlink the record from the hash chain */
+ if (prev) prev->hdr.next = i->hdr.next;
+ else *hash_bucket(hp,keyval) = i->hdr.next;
+ /* link it into the freelist */
+ i->hdr.next = hp->items.free;
+ hp->items.free = i;
+ /* we have another item */
+ hp->items.more++;
+
+ return 1;
+ }
+ return 0;
+}
+
+/*
+ * hashitem() - find a record in the table, and optionally enter a new one
+ */
+
+int
+hashitem(
+ register struct hash *hp,
+ HASHDATA **data,
+ int enter )
+{
+ register ITEM *i;
+ char *b = (*data)->key;
+ unsigned int keyval = hash_keyval(b);
+
+ #ifdef HASH_DEBUG_PROFILE
+ profile_frame prof[1];
+ if ( DEBUG_PROFILE )
+ profile_enter( 0, prof );
+ #endif
+
+ if ( enter && !hp->items.more )
+ hashrehash( hp );
+
+ if ( !enter && !hp->items.nel )
+ {
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+ return 0;
+ }
+
+ i = hash_search( hp, keyval, (*data)->key, 0 );
+ if (i)
+ {
+ *data = &i->data;
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE ) profile_exit( prof );
+ #endif
+ return !0;
+ }
+
+ if ( enter )
+ {
+ ITEM * * base = hash_bucket(hp,keyval);
+
+ /* try to grab one from the free list */
+ if ( hp->items.free )
+ {
+ i = hp->items.free;
+ hp->items.free = i->hdr.next;
+ assert( i->data.key == 0 );
+ }
+ else
+ {
+ i = (ITEM *)hp->items.next;
+ hp->items.next += hp->items.size;
+ }
+ hp->items.more--;
+ memcpy( (char *)&i->data, (char *)*data, hp->items.datalen );
+ i->hdr.keyval = keyval;
+ i->hdr.next = *base;
+ *base = i;
+ *data = &i->data;
+ #ifdef OPT_BOEHM_GC
+ if (sizeof(HASHDATA) == hp->items.datalen)
+ {
+ GC_REGISTER_FINALIZER(i->data.key,&hash_mem_finalizer,hp,0,0);
+ }
+ #endif
+ }
+
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+ return 0;
+}
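+
+/* A minimal usage sketch (not part of the original Jam sources).  Client code
+ * defines a record whose first member is the key pointer so that it overlays
+ * struct hashdata, gives the full record size to hashinit(), and goes through
+ * the hashenter()/hashcheck() macros from hash.h, which redirect the passed
+ * pointer to the copy stored in the table.  The guard and the names counter_t
+ * and hash_usage_sketch are made up for illustration.
+ */
+#ifdef HASH_USAGE_SKETCH
+typedef struct { char * key; int count; } counter_t;
+
+static void hash_usage_sketch( void )
+{
+    struct hash * tab = hashinit( sizeof( counter_t ), "counters" );
+    counter_t record;
+    counter_t * r = &record;
+
+    record.key = "example";   /* the key must be the first member */
+    record.count = 0;
+    if ( hashenter( tab, (HASHDATA * *)&r ) )
+        r->count = 1;         /* newly entered: r now points at the stored copy */
+    else
+        r->count += 1;        /* already present: bump the stored counter       */
+
+    hashdone( tab );
+}
+#endif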
+
+/*
+ * hashrehash() - resize and rebuild hp->tab, the hash table
+ */
+
+static void hashrehash( register struct hash *hp )
+{
+ int i = ++hp->items.list;
+ hp->items.more = i ? 2 * hp->items.nel : hp->inel;
+ hp->items.next = (char *)hash_mem_alloc( hp->items.datalen, hp->items.more * hp->items.size );
+ hp->items.free = 0;
+
+ hp->items.lists[i].nel = hp->items.more;
+ hp->items.lists[i].base = hp->items.next;
+ hp->items.nel += hp->items.more;
+
+ if ( hp->tab.base )
+ hash_mem_free( hp->items.datalen, (char *)hp->tab.base );
+
+ hp->tab.nel = hp->items.nel * hp->bloat;
+ hp->tab.base = (ITEM **)hash_mem_alloc( hp->items.datalen, hp->tab.nel * sizeof(ITEM **) );
+
+ memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) );
+
+ for ( i = 0; i < hp->items.list; ++i )
+ {
+ int nel = hp->items.lists[i].nel;
+ char *next = hp->items.lists[i].base;
+
+ for ( ; nel--; next += hp->items.size )
+ {
+ register ITEM *i = (ITEM *)next;
+ ITEM **ip = hp->tab.base + i->hdr.keyval % hp->tab.nel;
+ /* code currently assumes rehashing only when there are no free items */
+ assert( i->data.key != 0 );
+
+ i->hdr.next = *ip;
+ *ip = i;
+ }
+ }
+}
+
+void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data )
+{
+ int i;
+ for ( i = 0; i <= hp->items.list; ++i )
+ {
+ char * next = hp->items.lists[i].base;
+ int nel = hp->items.lists[i].nel;
+ if ( i == hp->items.list )
+ nel -= hp->items.more;
+
+ for ( ; nel--; next += hp->items.size )
+ {
+ ITEM * i = (ITEM *)next;
+            if ( i->data.key != 0 ) /* Do not enumerate freed items. */
+ f( &i->data, data );
+ }
+ }
+}
+
+/* --- */
+
+# define ALIGNED(x) ( ( x + sizeof( ITEM ) - 1 ) & ~( sizeof( ITEM ) - 1 ) )
+
+/*
+ * hashinit() - initialize a hash table, returning a handle
+ */
+
+struct hash *
+hashinit(
+ int datalen,
+ char *name )
+{
+ struct hash *hp = (struct hash *)hash_mem_alloc( datalen, sizeof( *hp ) );
+
+ hp->bloat = 3;
+ hp->tab.nel = 0;
+ hp->tab.base = (ITEM **)0;
+ hp->items.more = 0;
+ hp->items.free = 0;
+ hp->items.datalen = datalen;
+ hp->items.size = sizeof( struct hashhdr ) + ALIGNED( datalen );
+ hp->items.list = -1;
+ hp->items.nel = 0;
+ hp->inel = 11 /* 47 */;
+ hp->name = name;
+
+ return hp;
+}
+
+/*
+ * hashdone() - free a hash table, given its handle
+ */
+
+void
+hashdone( struct hash *hp )
+{
+ int i;
+
+ if ( !hp )
+ return;
+
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ hashstat( hp );
+
+ if ( hp->tab.base )
+ hash_mem_free( hp->items.datalen, (char *)hp->tab.base );
+ for ( i = 0; i <= hp->items.list; ++i )
+ hash_mem_free( hp->items.datalen, hp->items.lists[i].base );
+ hash_mem_free( hp->items.datalen, (char *)hp );
+}
+
+static void * hash_mem_alloc(size_t datalen, size_t size)
+{
+ if (sizeof(HASHDATA) == datalen)
+ {
+ return BJAM_MALLOC_RAW(size);
+ }
+ else
+ {
+ return BJAM_MALLOC(size);
+ }
+}
+
+static void hash_mem_free(size_t datalen, void * data)
+{
+ if (sizeof(HASHDATA) == datalen)
+ {
+ BJAM_FREE_RAW(data);
+ }
+ else
+ {
+ BJAM_FREE(data);
+ }
+}
+
+#ifdef OPT_BOEHM_GC
+static void hash_mem_finalizer(char * key, struct hash * hp)
+{
+ HASHDATA d;
+ d.key = key;
+ hash_free(hp,&d);
+}
+#endif
+
+
+/* ---- */
+
+static void hashstat( struct hash * hp )
+{
+ ITEM * * tab = hp->tab.base;
+ int nel = hp->tab.nel;
+ int count = 0;
+ int sets = 0;
+ int run = ( tab[ nel - 1 ] != (ITEM *)0 );
+ int i;
+ int here;
+
+ for ( i = nel; i > 0; --i )
+ {
+ if ( ( here = ( *tab++ != (ITEM *)0 ) ) )
+ count++;
+ if ( here && !run )
+ sets++;
+ run = here;
+ }
+
+ printf( "%s table: %d+%d+%d (%dK+%luK) items+table+hash, %f density\n",
+ hp->name,
+ count,
+ hp->items.nel,
+ hp->tab.nel,
+ hp->items.nel * hp->items.size / 1024,
+ (long unsigned)hp->tab.nel * sizeof( ITEM ** ) / 1024,
+ (float)count / (float)sets );
+}
diff --git a/jam-files/engine/hash.h b/jam-files/engine/hash.h
new file mode 100644
index 000000000..7195b4146
--- /dev/null
+++ b/jam-files/engine/hash.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hash.h - simple in-memory hashing routines
+ */
+
+#ifndef BOOST_JAM_HASH_H
+#define BOOST_JAM_HASH_H
+
+typedef struct hashdata HASHDATA;
+
+struct hash * hashinit ( int datalen, char * name );
+int hashitem ( struct hash * hp, HASHDATA * * data, int enter );
+void hashdone ( struct hash * hp );
+void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data );
+int hash_free ( struct hash * hp, HASHDATA * data);
+
+#define hashenter( hp, data ) ( !hashitem( hp, data, !0 ) )
+#define hashcheck( hp, data ) hashitem( hp, data, 0 )
+
+#endif
diff --git a/jam-files/engine/hcache.c b/jam-files/engine/hcache.c
new file mode 100644
index 000000000..70bb798cc
--- /dev/null
+++ b/jam-files/engine/hcache.c
@@ -0,0 +1,434 @@
+/*
+ * This file has been donated to Jam.
+ */
+
+# include "jam.h"
+# include "lists.h"
+# include "parse.h"
+# include "rules.h"
+# include "regexp.h"
+# include "headers.h"
+# include "newstr.h"
+# include "hash.h"
+# include "hcache.h"
+# include "variable.h"
+# include "search.h"
+
+#ifdef OPT_HEADER_CACHE_EXT
+
+/*
+ * Craig W. McPheeters, Alias|Wavefront.
+ *
+ * hcache.c hcache.h - handle caching of #includes in source files.
+ *
+ * Create a cache of files scanned for headers. When starting jam, look for the
+ * cache file and load it if present. When finished the binding phase, create a
+ * new header cache. The cache contains files, their timestamps and the header
+ * files found in their scan. During the binding phase of jam, look in the
+ * header cache first for the headers contained in a file. If the cache is
+ * present and valid, use its contents. This results in dramatic speedups with
+ * large projects (eg. 3min -> 1min startup for one project.)
+ *
+ * External routines:
+ * hcache_init() - read and parse the local .jamdeps file.
+ * hcache_done() - write a new .jamdeps file.
+ * hcache() - return list of headers on target. Use cache or do a scan.
+ *
+ * The cache file is a sequence of netstrings (see read_netstring() /
+ * write_netstring() below). It starts with a version marker and then holds
+ * one record per target: the bound name, its timestamp, its age, the list of
+ * files it includes and the HDRSCAN patterns in effect when it was scanned.
+ */
+
+typedef struct hcachedata HCACHEDATA ;
+
+struct hcachedata
+{
+ char * boundname;
+ time_t time;
+ LIST * includes;
+ LIST * hdrscan; /* the HDRSCAN value for this target */
+ int age; /* if too old, we'll remove it from cache */
+ HCACHEDATA * next;
+};
+
+
+static struct hash * hcachehash = 0;
+static HCACHEDATA * hcachelist = 0;
+
+static int queries = 0;
+static int hits = 0;
+
+#define CACHE_FILE_VERSION "version 4"
+#define CACHE_RECORD_HEADER "header"
+#define CACHE_RECORD_END "end"
+
+
+/*
+ * Return the name of the header cache file. May return NULL.
+ *
+ * The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache
+ * the result so the user can not change the cache file during header scanning.
+ */
+
+static char * cache_name( void )
+{
+ static char * name = 0;
+ if ( !name )
+ {
+ LIST * hcachevar = var_get( "HCACHEFILE" );
+
+ if ( hcachevar )
+ {
+ TARGET * t = bindtarget( hcachevar->string );
+
+ pushsettings( t->settings );
+ /* Do not expect the cache file to be generated, so pass 0 as the
+ * third argument to search. Expect the location to be specified via
+             * LOCATE, so pass 0 as the fourth argument.
+ */
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( t->settings );
+
+ if ( hcachevar )
+ name = copystr( t->boundname );
+ }
+ }
+ return name;
+}
+
+
+/*
+ * Return the maximum age a cache entry can have before it is purged from the
+ * cache.
+ */
+
+static int cache_maxage( void )
+{
+ int age = 100;
+ LIST * var = var_get( "HCACHEMAXAGE" );
+ if ( var )
+ {
+ age = atoi( var->string );
+ if ( age < 0 )
+ age = 0;
+ }
+ return age;
+}
+
+
+/*
+ * Read a netstring. The caveat is that the string can not contain ASCII 0. The
+ * returned value is as returned by newstr(), so it need not be freed.
+ */
+
+char * read_netstring( FILE * f )
+{
+ unsigned long len;
+ static char * buf = NULL;
+ static unsigned long buf_len = 0;
+
+ if ( fscanf( f, " %9lu", &len ) != 1 )
+ return NULL;
+ if ( fgetc( f ) != (int)'\t' )
+ return NULL;
+
+ if ( len > 1024 * 64 )
+ return NULL; /* sanity check */
+
+ if ( len > buf_len )
+ {
+ unsigned long new_len = buf_len * 2;
+ if ( new_len < len )
+ new_len = len;
+ buf = (char *)BJAM_REALLOC( buf, new_len + 1 );
+ if ( buf )
+ buf_len = new_len;
+ }
+
+ if ( !buf )
+ return NULL;
+
+ if ( fread( buf, 1, len, f ) != len )
+ return NULL;
+ if ( fgetc( f ) != (int)'\n' )
+ return NULL;
+
+ buf[ len ] = 0;
+ return newstr( buf );
+}
+
+
+/*
+ * Write a netstring.
+ */
+
+void write_netstring( FILE * f, char const * s )
+{
+ if ( !s )
+ s = "";
+ fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
+}
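+
+/* A small sketch (not part of the original Jam sources) of the on-disk shape
+ * used above: write_netstring() emits "<decimal length>\t<bytes>\n", e.g.
+ * "6\theader\n" for the string "header", and read_netstring() reverses it.
+ * The guard, helper name and path argument are made up for illustration.
+ */
+#ifdef NETSTRING_SKETCH
+static int netstring_round_trip( const char * path )
+{
+    FILE * f = fopen( path, "wb" );
+    char * s;
+
+    if ( !f ) return -1;
+    write_netstring( f, "header" );   /* writes "6\theader\n" */
+    fclose( f );
+
+    if ( !( f = fopen( path, "rb" ) ) ) return -1;
+    s = read_netstring( f );          /* returns a newstr()'ed "header" */
+    fclose( f );
+
+    return ( s && !strcmp( s, "header" ) ) ? 0 : -1;
+}
+#endif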
+
+
+void hcache_init()
+{
+ HCACHEDATA cachedata;
+ HCACHEDATA * c;
+ FILE * f;
+ char * version;
+ int header_count = 0;
+ char * hcachename;
+
+ hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" );
+
+ if ( !( hcachename = cache_name() ) )
+ return;
+
+ if ( !( f = fopen( hcachename, "rb" ) ) )
+ return;
+
+ version = read_netstring( f );
+ if ( !version || strcmp( version, CACHE_FILE_VERSION ) )
+ {
+ fclose( f );
+ return;
+ }
+
+ while ( 1 )
+ {
+ char * record_type;
+ char * time_str;
+ char * age_str;
+ char * includes_count_str;
+ char * hdrscan_count_str;
+ int i;
+ int count;
+ LIST * l;
+
+ record_type = read_netstring( f );
+ if ( !record_type )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto bail;
+ }
+ if ( !strcmp( record_type, CACHE_RECORD_END ) )
+ break;
+ if ( strcmp( record_type, CACHE_RECORD_HEADER ) )
+ {
+ fprintf( stderr, "invalid %s with record separator <%s>\n",
+ hcachename, record_type ? record_type : "<null>" );
+ goto bail;
+ }
+
+ c = &cachedata;
+
+ c->boundname = read_netstring( f );
+ time_str = read_netstring( f );
+ age_str = read_netstring( f );
+ includes_count_str = read_netstring( f );
+
+ if ( !c->boundname || !time_str || !age_str || !includes_count_str )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto bail;
+ }
+
+ c->time = atoi( time_str );
+ c->age = atoi( age_str ) + 1;
+
+ count = atoi( includes_count_str );
+ for ( l = 0, i = 0; i < count; ++i )
+ {
+ char * s = read_netstring( f );
+ if ( !s )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto bail;
+ }
+ l = list_new( l, s );
+ }
+ c->includes = l;
+
+ hdrscan_count_str = read_netstring( f );
+        if ( !hdrscan_count_str )
+ {
+ list_free( c->includes );
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto bail;
+ }
+
+ count = atoi( hdrscan_count_str );
+ for ( l = 0, i = 0; i < count; ++i )
+ {
+ char * s = read_netstring( f );
+ if ( !s )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto bail;
+ }
+ l = list_new( l, s );
+ }
+ c->hdrscan = l;
+
+ if ( !hashenter( hcachehash, (HASHDATA * *)&c ) )
+ {
+ fprintf( stderr, "can't insert header cache item, bailing on %s\n",
+ hcachename );
+ goto bail;
+ }
+
+ c->next = hcachelist;
+ hcachelist = c;
+
+ ++header_count;
+ }
+
+ if ( DEBUG_HEADER )
+ printf( "hcache read from file %s\n", hcachename );
+
+ bail:
+ fclose( f );
+}
+
+
+void hcache_done()
+{
+ FILE * f;
+ HCACHEDATA * c;
+ int header_count = 0;
+ char * hcachename;
+ int maxage;
+
+ if ( !hcachehash )
+ return;
+
+ if ( !( hcachename = cache_name() ) )
+ return;
+
+ if ( !( f = fopen( hcachename, "wb" ) ) )
+ return;
+
+ maxage = cache_maxage();
+
+ /* Print out the version. */
+ write_netstring( f, CACHE_FILE_VERSION );
+
+ c = hcachelist;
+ for ( c = hcachelist; c; c = c->next )
+ {
+ LIST * l;
+ char time_str[ 30 ];
+ char age_str[ 30 ];
+ char includes_count_str[ 30 ];
+ char hdrscan_count_str[ 30 ];
+
+ if ( maxage == 0 )
+ c->age = 0;
+ else if ( c->age > maxage )
+ continue;
+
+ sprintf( includes_count_str, "%lu", (long unsigned) list_length( c->includes ) );
+ sprintf( hdrscan_count_str, "%lu", (long unsigned) list_length( c->hdrscan ) );
+ sprintf( time_str, "%lu", (long unsigned) c->time );
+ sprintf( age_str, "%lu", (long unsigned) c->age );
+
+ write_netstring( f, CACHE_RECORD_HEADER );
+ write_netstring( f, c->boundname );
+ write_netstring( f, time_str );
+ write_netstring( f, age_str );
+ write_netstring( f, includes_count_str );
+ for ( l = c->includes; l; l = list_next( l ) )
+ write_netstring( f, l->string );
+ write_netstring( f, hdrscan_count_str );
+ for ( l = c->hdrscan; l; l = list_next( l ) )
+ write_netstring( f, l->string );
+ fputs( "\n", f );
+ ++header_count;
+ }
+ write_netstring( f, CACHE_RECORD_END );
+
+ if ( DEBUG_HEADER )
+ printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n",
+ hcachename, header_count, queries ? 100.0 * hits / queries : 0 );
+
+ fclose ( f );
+}
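+
+/* Illustrative only (not part of the original Jam sources): what one record of
+ * the version-4 cache written by hcache_done() above looks like on disk.
+ * Every field is a netstring.  The path, timestamp and counts below are made
+ * up for illustration.
+ */
+#ifdef HCACHE_FORMAT_SKETCH
+static const char hcache_example_file[] =
+    "9\tversion 4\n"        /* CACHE_FILE_VERSION                   */
+    "6\theader\n"           /* CACHE_RECORD_HEADER                  */
+    "11\t/src/main.c\n"     /* boundname                            */
+    "10\t1323387450\n"      /* timestamp (seconds since the epoch)  */
+    "1\t0\n"                /* age                                  */
+    "1\t1\n"                /* number of included files that follow */
+    "5\tjam.h\n"            /* an included file                     */
+    "1\t0\n"                /* number of HDRSCAN patterns           */
+    "\n"                    /* blank line ending the record         */
+    "3\tend\n";             /* CACHE_RECORD_END                     */
+#endif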
+
+
+LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan )
+{
+ HCACHEDATA cachedata;
+ HCACHEDATA * c = &cachedata;
+
+ LIST * l = 0;
+
+ ++queries;
+
+ c->boundname = t->boundname;
+
+ if (hashcheck (hcachehash, (HASHDATA **) &c))
+ {
+ if (c->time == t->time)
+ {
+ LIST *l1 = hdrscan, *l2 = c->hdrscan;
+ while (l1 && l2) {
+ if (l1->string != l2->string) {
+ l1 = NULL;
+ } else {
+ l1 = list_next(l1);
+ l2 = list_next(l2);
+ }
+ }
+ if (l1 || l2) {
+ if (DEBUG_HEADER)
+ printf("HDRSCAN out of date in cache for %s\n",
+ t->boundname);
+
+ printf("HDRSCAN out of date for %s\n", t->boundname);
+ printf(" real : ");
+ list_print(hdrscan);
+ printf("\n cached: ");
+ list_print(c->hdrscan);
+ printf("\n");
+
+ list_free(c->includes);
+ list_free(c->hdrscan);
+ c->includes = 0;
+ c->hdrscan = 0;
+ } else {
+ if (DEBUG_HEADER)
+ printf ("using header cache for %s\n", t->boundname);
+ c->age = 0;
+ ++hits;
+ l = list_copy (0, c->includes);
+ return l;
+ }
+ } else {
+ if (DEBUG_HEADER)
+ printf ("header cache out of date for %s\n", t->boundname);
+ list_free (c->includes);
+ list_free(c->hdrscan);
+ c->includes = 0;
+ c->hdrscan = 0;
+ }
+ } else {
+ if (hashenter (hcachehash, (HASHDATA **)&c)) {
+ c->boundname = newstr (c->boundname);
+ c->next = hcachelist;
+ hcachelist = c;
+ }
+ }
+
+    /* 'c' points at the cache entry. It's out of date. */
+
+ l = headers1 (0, t->boundname, rec, re);
+
+ c->time = t->time;
+ c->age = 0;
+ c->includes = list_copy (0, l);
+ c->hdrscan = list_copy(0, hdrscan);
+
+ return l;
+}
+
+#endif
diff --git a/jam-files/engine/hcache.h b/jam-files/engine/hcache.h
new file mode 100644
index 000000000..c316e3bca
--- /dev/null
+++ b/jam-files/engine/hcache.h
@@ -0,0 +1,18 @@
+/*
+ * This file is not part of Jam
+ */
+
+/*
+ * hcache.h - handle #includes in source files
+ */
+#ifndef HCACHE_H
+# define HCACHE_H
+
+# include "regexp.h"
+# include "lists.h"
+
+void hcache_init(void);
+void hcache_done(void);
+LIST *hcache(TARGET *t, int rec, regexp *re[], LIST *hdrscan);
+
+#endif
diff --git a/jam-files/engine/hdrmacro.c b/jam-files/engine/hdrmacro.c
new file mode 100644
index 000000000..43031d48f
--- /dev/null
+++ b/jam-files/engine/hdrmacro.c
@@ -0,0 +1,137 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "lists.h"
+# include "parse.h"
+# include "compile.h"
+# include "rules.h"
+# include "variable.h"
+# include "regexp.h"
+# include "hdrmacro.h"
+# include "hash.h"
+# include "newstr.h"
+# include "strings.h"
+
+/*
+ * hdrmacro.c - handle header files that define macros used in
+ * #include statements.
+ *
+ * we look for lines like "#define MACRO <....>" or '#define MACRO " "'
+ * in the target file. When found, we
+ *
+ * we then phony up a rule invocation like:
+ *
+ * $(HDRRULE) <target> : <resolved included files> ;
+ *
+ * External routines:
+ * macro_headers() - scan a target for "#define MACRO <filename>" lines
+ * macro_header_get() - return the filename recorded for a given macro, if any
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 09/10/00 (seiwald) - replaced call to compile_rule with evaluate_rule,
+ * so that headers() doesn't have to mock up a parse structure
+ * just to invoke a rule.
+ */
+
+/* this type is used to store a dictionary of file header macros */
+typedef struct header_macro
+{
+ char * symbol;
+ char * filename; /* we could maybe use a LIST here ?? */
+} HEADER_MACRO;
+
+static struct hash * header_macros_hash = 0;
+
+
+/*
+ * headers() - scan a target for include files and call HDRRULE
+ */
+
+# define MAXINC 10
+
+void
+macro_headers( TARGET *t )
+{
+ static regexp *re = 0;
+ FILE *f;
+ char buf[ 1024 ];
+
+ if ( DEBUG_HEADER )
+ printf( "macro header scan for %s\n", t->name );
+
+    /* this regexp is used to detect lines of the form */
+    /* "#define MACRO <....>" or '#define MACRO "...."' */
+    /* in the header macro files. */
+ if ( re == 0 )
+ {
+ re = regex_compile(
+ "^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*"
+ "[<\"]([^\">]*)[\">].*$" );
+ }
+
+ if ( !( f = fopen( t->boundname, "r" ) ) )
+ return;
+
+ while ( fgets( buf, sizeof( buf ), f ) )
+ {
+ HEADER_MACRO var;
+ HEADER_MACRO *v = &var;
+
+ if ( regexec( re, buf ) && re->startp[1] )
+ {
+            /* we detected a line that looks like "#define MACRO filename" */
+ re->endp[1][0] = '\0';
+ re->endp[2][0] = '\0';
+
+ if ( DEBUG_HEADER )
+ printf( "macro '%s' used to define filename '%s' in '%s'\n",
+ re->startp[1], re->startp[2], t->boundname );
+
+ /* add macro definition to hash table */
+ if ( !header_macros_hash )
+ header_macros_hash = hashinit( sizeof( HEADER_MACRO ), "hdrmacros" );
+
+ v->symbol = re->startp[1];
+ v->filename = 0;
+ if ( hashenter( header_macros_hash, (HASHDATA **)&v ) )
+ {
+ v->symbol = newstr( re->startp[1] ); /* never freed */
+ v->filename = newstr( re->startp[2] ); /* never freed */
+ }
+ /* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */
+ /* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */
+ }
+ }
+
+ fclose( f );
+}
+
+
+char * macro_header_get( const char * macro_name )
+{
+ HEADER_MACRO var;
+ HEADER_MACRO * v = &var;
+
+ v->symbol = (char* )macro_name;
+
+ if ( header_macros_hash && hashcheck( header_macros_hash, (HASHDATA **)&v ) )
+ {
+ if ( DEBUG_HEADER )
+ printf( "### macro '%s' evaluated to '%s'\n", macro_name, v->filename );
+ return v->filename;
+ }
+ return 0;
+}
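+
+/* A minimal sketch (not part of the original Jam sources): given a header
+ * containing the line
+ *     #define MY_CONFIG_HEADER "config-linux.h"
+ * macro_headers() records the mapping, and a later "#include MY_CONFIG_HEADER"
+ * can be resolved through macro_header_get(), as headers1() does.  The guard,
+ * helper name, macro name and file name are made up for illustration.
+ */
+#ifdef HDRMACRO_SKETCH
+static void hdrmacro_sketch( TARGET * t )
+{
+    char * resolved;
+
+    macro_headers( t );   /* scan t->boundname for #define lines */
+    resolved = macro_header_get( "MY_CONFIG_HEADER" );
+    if ( resolved )
+        printf( "MY_CONFIG_HEADER -> %s\n", resolved );   /* config-linux.h */
+}
+#endif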
diff --git a/jam-files/engine/hdrmacro.h b/jam-files/engine/hdrmacro.h
new file mode 100644
index 000000000..08cc11160
--- /dev/null
+++ b/jam-files/engine/hdrmacro.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hdrmacro.h - parses header files for #define MACRO <filename> or
+ * #define MACRO "filename" definitions
+ */
+
+void macro_headers( TARGET *t );
+
+char* macro_header_get( const char* macro_name );
diff --git a/jam-files/engine/headers.c b/jam-files/engine/headers.c
new file mode 100644
index 000000000..b9d8f6370
--- /dev/null
+++ b/jam-files/engine/headers.c
@@ -0,0 +1,203 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "lists.h"
+# include "parse.h"
+# include "compile.h"
+# include "rules.h"
+# include "variable.h"
+# include "regexp.h"
+# include "headers.h"
+# include "hdrmacro.h"
+# include "newstr.h"
+
+#ifdef OPT_HEADER_CACHE_EXT
+# include "hcache.h"
+#endif
+
+/*
+ * headers.c - handle #includes in source files
+ *
+ * Using regular expressions provided as the variable $(HDRSCAN),
+ * headers() searches a file for #include files and phonies up a
+ * rule invocation:
+ *
+ * $(HDRRULE) <target> : <include files> ;
+ *
+ * External routines:
+ * headers() - scan a target for include files and call HDRRULE
+ *
+ * Internal routines:
+ * headers1() - using regexp, scan a file and build include LIST
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 09/10/00 (seiwald) - replaced call to compile_rule with evaluate_rule,
+ * so that headers() doesn't have to mock up a parse structure
+ * just to invoke a rule.
+ */
+
+#ifndef OPT_HEADER_CACHE_EXT
+static LIST *headers1( LIST *l, char *file, int rec, regexp *re[]);
+#endif
+
+/*
+ * headers() - scan a target for include files and call HDRRULE
+ */
+
+# define MAXINC 10
+
+void
+headers( TARGET *t )
+{
+ LIST * hdrscan;
+ LIST * hdrrule;
+ #ifndef OPT_HEADER_CACHE_EXT
+ LIST * headlist = 0;
+ #endif
+ regexp * re[ MAXINC ];
+ int rec = 0;
+
+ if ( !( hdrscan = var_get( "HDRSCAN" ) ) ||
+ !( hdrrule = var_get( "HDRRULE" ) ) )
+ return;
+
+ if ( DEBUG_HEADER )
+ printf( "header scan %s\n", t->name );
+
+ /* Compile all regular expressions in HDRSCAN */
+ while ( ( rec < MAXINC ) && hdrscan )
+ {
+ re[ rec++ ] = regex_compile( hdrscan->string );
+ hdrscan = list_next( hdrscan );
+ }
+
+ /* Doctor up call to HDRRULE rule */
+ /* Call headers1() to get LIST of included files. */
+ {
+ FRAME frame[1];
+ frame_init( frame );
+ lol_add( frame->args, list_new( L0, t->name ) );
+#ifdef OPT_HEADER_CACHE_EXT
+ lol_add( frame->args, hcache( t, rec, re, hdrscan ) );
+#else
+ lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) );
+#endif
+
+ if ( lol_get( frame->args, 1 ) )
+ {
+ /* The third argument to HDRRULE is the bound name of
+ * $(<) */
+ lol_add( frame->args, list_new( L0, t->boundname ) );
+
+ list_free( evaluate_rule( hdrrule->string, frame ) );
+ }
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * headers1() - using regexp, scan a file and build include LIST.
+ */
+
+#ifdef OPT_HEADER_CACHE_EXT
+LIST *
+#else
+static LIST *
+#endif
+headers1(
+ LIST * l,
+ char * file,
+ int rec,
+ regexp * re[] )
+{
+ FILE * f;
+ char buf[ 1024 ];
+ int i;
+ static regexp * re_macros = 0;
+
+#ifdef OPT_IMPROVED_PATIENCE_EXT
+ static int count = 0;
+ ++count;
+ if ( ((count == 100) || !( count % 1000 )) && DEBUG_MAKE )
+ printf("...patience...\n");
+#endif
+
+    /* the following regexp is used to detect cases where a */
+    /* file is included through a line like "#include MACRO" */
+ if ( re_macros == 0 )
+ re_macros = regex_compile(
+ "^[ ]*#[ ]*include[ ]*([A-Za-z][A-Za-z0-9_]*).*$" );
+
+ if ( !( f = fopen( file, "r" ) ) )
+ return l;
+
+ while ( fgets( buf, sizeof( buf ), f ) )
+ {
+ int size = strlen( buf );
+ /* Remove trailing \r and \n, if any. */
+ while ( ( size > 0 ) &&
+ ( buf[ size - 1 ] == '\n' ) &&
+ ( buf[ size - 1 ] == '\r' ) )
+ {
+ buf[ size - 1 ] = '\0';
+ --size;
+ }
+
+ for ( i = 0; i < rec; ++i )
+ if ( regexec( re[i], buf ) && re[i]->startp[1] )
+ {
+ re[i]->endp[1][0] = '\0';
+
+ if ( DEBUG_HEADER )
+ printf( "header found: %s\n", re[i]->startp[1] );
+
+ l = list_new( l, newstr( re[i]->startp[1] ) );
+ }
+
+ /* special treatment for #include MACRO */
+ if ( regexec( re_macros, buf ) && re_macros->startp[1] )
+ {
+ char* header_filename;
+
+ re_macros->endp[1][0] = '\0';
+
+ if ( DEBUG_HEADER )
+ printf( "macro header found: %s", re_macros->startp[1] );
+
+ header_filename = macro_header_get( re_macros->startp[1] );
+ if ( header_filename )
+ {
+ if ( DEBUG_HEADER )
+ printf( " resolved to '%s'\n", header_filename );
+ l = list_new( l, newstr( header_filename ) );
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ printf( " ignored !!\n" );
+ }
+ }
+ }
+
+ fclose( f );
+
+ return l;
+}
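+
+/* A minimal sketch (not part of the original Jam sources) of the kind of
+ * pattern $(HDRSCAN) typically carries and how the captured group is pulled
+ * out, mirroring the loop in headers1() above.  The guard, helper name and
+ * pattern are illustrative, not the engine defaults.
+ */
+#ifdef HDRSCAN_SKETCH
+static void hdrscan_sketch( void )
+{
+    regexp * re = regex_compile( "^[ ]*#[ ]*include[ ]*[<\"]([^\">]*)[\">]" );
+    char line[] = "#include \"lists.h\"";
+
+    if ( regexec( re, line ) && re->startp[ 1 ] )
+    {
+        re->endp[ 1 ][ 0 ] = '\0';   /* terminate the captured name in place */
+        printf( "would hand to $(HDRRULE): %s\n", re->startp[ 1 ] );   /* lists.h */
+    }
+}
+#endif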
+
+
+void regerror( char * s )
+{
+ printf( "re error %s\n", s );
+}
diff --git a/jam-files/engine/headers.h b/jam-files/engine/headers.h
new file mode 100644
index 000000000..624475fe7
--- /dev/null
+++ b/jam-files/engine/headers.h
@@ -0,0 +1,16 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * headers.h - handle #includes in source files
+ */
+
+void headers( TARGET *t );
+
+#ifdef OPT_HEADER_CACHE_EXT
+struct regexp;
+LIST *headers1( LIST *l, char *file, int rec, struct regexp *re[] );
+#endif
diff --git a/jam-files/engine/jam.c b/jam-files/engine/jam.c
new file mode 100644
index 000000000..e11d082bc
--- /dev/null
+++ b/jam-files/engine/jam.c
@@ -0,0 +1,632 @@
+/*
+ * /+\
+ * +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ * \+/
+ *
+ * This file is part of jam.
+ *
+ * License is hereby granted to use this software and distribute it
+ * freely, as long as this copyright notice is retained and modifications
+ * are clearly marked.
+ *
+ * ALL WARRANTIES ARE HEREBY DISCLAIMED.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jam.c - make redux
+ *
+ * See Jam.html for usage information.
+ *
+ * These comments document the code.
+ *
+ * The top half of the code is structured such:
+ *
+ * jam
+ * / | \
+ * +---+ | \
+ * / | \
+ * jamgram option \
+ * / | \ \
+ * / | \ \
+ * / | \ |
+ * scan | compile make
+ * | | / | \ / | \
+ * | | / | \ / | \
+ * | | / | \ / | \
+ * jambase parse | rules search make1
+ * | | | \
+ * | | | \
+ * | | | \
+ * builtins timestamp command execute
+ * |
+ * |
+ * |
+ * filesys
+ *
+ *
+ * The support routines are called by all of the above, but themselves
+ * are layered thus:
+ *
+ * variable|expand
+ * / | | |
+ * / | | |
+ * / | | |
+ * lists | | pathsys
+ * \ | |
+ * \ | |
+ * \ | |
+ * newstr |
+ * \ |
+ * \ |
+ * \ |
+ * hash
+ *
+ * Roughly, the modules are:
+ *
+ * builtins.c - jam's built-in rules
+ * command.c - maintain lists of commands
+ * compile.c - compile parsed jam statements
+ * execunix.c - execute a shell script on UNIX
+ * execvms.c - execute a shell script, ala VMS
+ * expand.c - expand a buffer, given variable values
+ * file*.c - scan directories and archives on *
+ * hash.c - simple in-memory hashing routines
+ * hdrmacro.c - handle header file parsing for filename macro definitions
+ * headers.c - handle #includes in source files
+ * jambase.c - compilable copy of Jambase
+ * jamgram.y - jam grammar
+ * lists.c - maintain lists of strings
+ * make.c - bring a target up to date, once rules are in place
+ * make1.c - execute command to bring targets up to date
+ * newstr.c - string manipulation routines
+ * option.c - command line option processing
+ * parse.c - make and destroy parse trees as driven by the parser
+ * path*.c - manipulate file names on *
+ * regexp.c - Henry Spencer's regexp
+ * rules.c - access to RULEs, TARGETs, and ACTIONs
+ * scan.c - the jam yacc scanner
+ * search.c - find a target along $(SEARCH) or $(LOCATE)
+ * timestamp.c - get the timestamp of a file or archive member
+ * variable.c - handle jam multi-element variables
+ *
+ * 05/04/94 (seiwald) - async multiprocess (-j) support
+ * 02/08/95 (seiwald) - -n implies -d2.
+ * 02/22/95 (seiwald) - -v for version info.
+ * 09/11/00 (seiwald) - PATCHLEVEL folded into VERSION.
+ * 01/10/01 (seiwald) - pathsys.h split from filesys.h
+ */
+
+
+#include "jam.h"
+#include "option.h"
+#include "patchlevel.h"
+
+/* These get various function declarations. */
+#include "lists.h"
+#include "parse.h"
+#include "variable.h"
+#include "compile.h"
+#include "builtins.h"
+#include "rules.h"
+#include "newstr.h"
+#include "scan.h"
+#include "timestamp.h"
+#include "make.h"
+#include "strings.h"
+#include "expand.h"
+#include "filesys.h"
+#include "output.h"
+
+/* Macintosh is "special" */
+#ifdef OS_MAC
+ #include <QuickDraw.h>
+#endif
+
+/* And UNIX for this. */
+#ifdef unix
+ #include <sys/utsname.h>
+ #include <signal.h>
+#endif
+
+struct globs globs =
+{
+ 0, /* noexec */
+ 1, /* jobs */
+ 0, /* quitquick */
+ 0, /* newestfirst */
+ 0, /* pipes action stdout and stderr merged to action output */
+#ifdef OS_MAC
+ { 0, 0 }, /* debug - suppress tracing output */
+#else
+ { 0, 1 }, /* debug ... */
+#endif
+ 0, /* output commands, not run them */
+ 0 /* action timeout */
+};
+
+/* Symbols to be defined as true for use in Jambase. */
+static char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 };
+
+
+/* Known for sure:
+ * mac needs arg_environ
+ * OS2 needs extern environ
+ */
+
+#ifdef OS_MAC
+ #define use_environ arg_environ
+ #ifdef MPW
+ QDGlobals qd;
+ #endif
+#endif
+
+/* on Win32-LCC */
+#if defined( OS_NT ) && defined( __LCC__ )
+ #define use_environ _environ
+#endif
+
+# if defined( __MWERKS__)
+ #define use_environ _environ
+ extern char * * _environ;
+#endif
+
+#ifndef use_environ
+ #define use_environ environ
+ #if !defined( __WATCOM__ ) && !defined( OS_OS2 ) && !defined( OS_NT )
+ extern char **environ;
+ #endif
+#endif
+
+#if YYDEBUG != 0
+ extern int yydebug;
+#endif
+
+#ifndef NDEBUG
+static void run_unit_tests()
+{
+#if defined( USE_EXECNT )
+ extern void execnt_unit_test();
+ execnt_unit_test();
+#endif
+ string_unit_test();
+ var_expand_unit_test();
+}
+#endif
+
+int anyhow = 0;
+
+#ifdef HAVE_PYTHON
+ extern PyObject * bjam_call ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_import_rule ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_define_action( PyObject * self, PyObject * args );
+ extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
+#endif
+
+char *saved_argv0;
+
+int main( int argc, char * * argv, char * * arg_environ )
+{
+ int n;
+ char * s;
+ struct bjam_option optv[N_OPTS];
+ char const * all = "all";
+ int status;
+ int arg_c = argc;
+ char * * arg_v = argv;
+ char const * progname = argv[0];
+
+ saved_argv0 = argv[0];
+
+ BJAM_MEM_INIT();
+
+# ifdef OS_MAC
+ InitGraf(&qd.thePort);
+# endif
+
+ --argc;
+ ++argv;
+
+ if ( getoptions( argc, argv, "-:l:d:j:p:f:gs:t:ano:qv", optv ) < 0 )
+ {
+ printf( "\nusage: %s [ options ] targets...\n\n", progname );
+
+ printf( "-a Build all targets, even if they are current.\n" );
+ printf( "-dx Set the debug level to x (0-9).\n" );
+ printf( "-fx Read x instead of Jambase.\n" );
+ /* printf( "-g Build from newest sources first.\n" ); */
+ printf( "-jx Run up to x shell commands concurrently.\n" );
+ printf( "-lx Limit actions to x number of seconds after which they are stopped.\n" );
+ printf( "-n Don't actually execute the updating actions.\n" );
+ printf( "-ox Write the updating actions to file x.\n" );
+ printf( "-px x=0, pipes action stdout and stderr merged into action output.\n" );
+ printf( "-q Quit quickly as soon as a target fails.\n" );
+ printf( "-sx=y Set variable x=y, overriding environment.\n" );
+ printf( "-tx Rebuild x, even if it is up-to-date.\n" );
+ printf( "-v Print the version of jam and exit.\n" );
+ printf( "--x Option is ignored.\n\n" );
+
+ exit( EXITBAD );
+ }
+
+ /* Version info. */
+ if ( ( s = getoptval( optv, 'v', 0 ) ) )
+ {
+ printf( "Boost.Jam " );
+ printf( "Version %s. %s.\n", VERSION, OSMINOR );
+ printf( " Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. \n" );
+ printf( " Copyright 2001 David Turner.\n" );
+ printf( " Copyright 2001-2004 David Abrahams.\n" );
+ printf( " Copyright 2002-2008 Rene Rivera.\n" );
+ printf( " Copyright 2003-2008 Vladimir Prus.\n" );
+
+ return EXITOK;
+ }
+
+ /* Pick up interesting options. */
+ if ( ( s = getoptval( optv, 'n', 0 ) ) )
+ globs.noexec++, globs.debug[2] = 1;
+
+ if ( ( s = getoptval( optv, 'p', 0 ) ) )
+ {
+ /* Undocumented -p3 (acts like both -p1 -p2) means separate pipe action
+ * stdout and stderr.
+ */
+ globs.pipe_action = atoi( s );
+ if ( ( 3 < globs.pipe_action ) || ( globs.pipe_action < 0 ) )
+ {
+ printf(
+ "Invalid pipe descriptor '%d', valid values are -p[0..3].\n",
+ globs.pipe_action );
+ exit( EXITBAD );
+ }
+ }
+
+ if ( ( s = getoptval( optv, 'q', 0 ) ) )
+ globs.quitquick = 1;
+
+ if ( ( s = getoptval( optv, 'a', 0 ) ) )
+ anyhow++;
+
+ if ( ( s = getoptval( optv, 'j', 0 ) ) )
+ {
+ globs.jobs = atoi( s );
+ if (globs.jobs == 0)
+ {
+ printf("Invalid value for the '-j' option.\n");
+ exit(EXITBAD);
+ }
+ }
+
+ if ( ( s = getoptval( optv, 'g', 0 ) ) )
+ globs.newestfirst = 1;
+
+ if ( ( s = getoptval( optv, 'l', 0 ) ) )
+ globs.timeout = atoi( s );
+
+ /* Turn on/off debugging */
+ for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n )
+ {
+ int i;
+
+ /* First -d, turn off defaults. */
+ if ( !n )
+ for ( i = 0; i < DEBUG_MAX; ++i )
+ globs.debug[i] = 0;
+
+ i = atoi( s );
+
+ if ( ( i < 0 ) || ( i >= DEBUG_MAX ) )
+ {
+ printf( "Invalid debug level '%s'.\n", s );
+ continue;
+ }
+
+ /* n turns on levels 1-n. */
+ /* +n turns on level n. */
+ if ( *s == '+' )
+ globs.debug[i] = 1;
+ else while ( i )
+ globs.debug[i--] = 1;
+ }
+
+ {
+ PROFILE_ENTER( MAIN );
+
+#ifdef HAVE_PYTHON
+ {
+ PROFILE_ENTER( MAIN_PYTHON );
+ Py_Initialize();
+ {
+ static PyMethodDef BjamMethods[] = {
+ {"call", bjam_call, METH_VARARGS,
+ "Call the specified bjam rule."},
+ {"import_rule", bjam_import_rule, METH_VARARGS,
+ "Imports Python callable to bjam."},
+ {"define_action", bjam_define_action, METH_VARARGS,
+ "Defines a command line action."},
+ {"variable", bjam_variable, METH_VARARGS,
+ "Obtains a variable from bjam's global module."},
+ {"backtrace", bjam_backtrace, METH_VARARGS,
+ "Returns bjam backtrace from the last call into Python."},
+ {"caller", bjam_caller, METH_VARARGS,
+ "Returns the module from which the last call into Python is made."},
+ {NULL, NULL, 0, NULL}
+ };
+
+ Py_InitModule( "bjam", BjamMethods );
+ }
+ PROFILE_EXIT( MAIN_PYTHON );
+ }
+#endif
+
+#ifndef NDEBUG
+ run_unit_tests();
+#endif
+#if YYDEBUG != 0
+ if ( DEBUG_PARSE )
+ yydebug = 1;
+#endif
+
+ /* Set JAMDATE. */
+ var_set( "JAMDATE", list_new( L0, outf_time(time(0)) ), VAR_SET );
+
+ /* Set JAM_VERSION. */
+ var_set( "JAM_VERSION",
+ list_new( list_new( list_new( L0,
+ newstr( VERSION_MAJOR_SYM ) ),
+ newstr( VERSION_MINOR_SYM ) ),
+ newstr( VERSION_PATCH_SYM ) ),
+ VAR_SET );
+
+ /* Set JAMUNAME. */
+#ifdef unix
+ {
+ struct utsname u;
+
+ if ( uname( &u ) >= 0 )
+ {
+ var_set( "JAMUNAME",
+ list_new(
+ list_new(
+ list_new(
+ list_new(
+ list_new( L0,
+ newstr( u.sysname ) ),
+ newstr( u.nodename ) ),
+ newstr( u.release ) ),
+ newstr( u.version ) ),
+ newstr( u.machine ) ), VAR_SET );
+ }
+ }
+#endif /* unix */
+
+ /* Load up environment variables. */
+
+ /* First into the global module, with splitting, for backward
+ * compatibility.
+ */
+ var_defines( use_environ, 1 );
+
+ /* Then into .ENVIRON, without splitting. */
+ enter_module( bindmodule(".ENVIRON") );
+ var_defines( use_environ, 0 );
+ exit_module( bindmodule(".ENVIRON") );
+
+ /*
+ * Jam defined variables OS & OSPLAT. We load them after environment, so
+ * that setting OS in environment does not change Jam's notion of the
+ * current platform.
+ */
+ var_defines( othersyms, 1 );
+
+ /* Load up variables set on command line. */
+ for ( n = 0; ( s = getoptval( optv, 's', n ) ); ++n )
+ {
+ char *symv[2];
+ symv[ 0 ] = s;
+ symv[ 1 ] = 0;
+ var_defines( symv, 1 );
+ enter_module( bindmodule(".ENVIRON") );
+ var_defines( symv, 0 );
+ exit_module( bindmodule(".ENVIRON") );
+ }
+
+ /* Set the ARGV to reflect the complete list of arguments of invocation.
+ */
+ for ( n = 0; n < arg_c; ++n )
+ var_set( "ARGV", list_new( L0, newstr( arg_v[n] ) ), VAR_APPEND );
+
+ /* Initialize built-in rules. */
+ load_builtins();
+
+ /* Add the targets in the command line to the update list. */
+ for ( n = 1; n < arg_c; ++n )
+ {
+ if ( arg_v[ n ][ 0 ] == '-' )
+ {
+ char * f = "-:l:d:j:f:gs:t:ano:qv";
+ for ( ; *f; ++f ) if ( *f == arg_v[ n ][ 1 ] ) break;
+ if ( ( f[ 1 ] == ':' ) && ( arg_v[ n ][ 2 ] == '\0' ) ) ++n;
+ }
+ else
+ {
+ mark_target_for_updating( arg_v[ n ] );
+ }
+ }
+
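+ /* With no explicit targets on the command line, fall back to the default
+ * "all" target, so a bare invocation behaves like building "all". */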
+ if (!targets_to_update())
+ mark_target_for_updating("all");
+
+ /* Parse ruleset. */
+ {
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ for ( n = 0; ( s = getoptval( optv, 'f', n ) ); ++n )
+ parse_file( s, frame );
+
+ if ( !n )
+ parse_file( "+", frame );
+ }
+
+ status = yyanyerrors();
+
+ /* Manually touch -t targets. */
+ for ( n = 0; ( s = getoptval( optv, 't', n ) ); ++n )
+ touch_target( s );
+
+ /* If an output file is specified, set globs.cmdout to that. */
+ if ( ( s = getoptval( optv, 'o', 0 ) ) )
+ {
+ if ( !( globs.cmdout = fopen( s, "w" ) ) )
+ {
+ printf( "Failed to write to '%s'\n", s );
+ exit( EXITBAD );
+ }
+ ++globs.noexec;
+ }
+
+ /* The build system may set the PARALLELISM variable to override -j
+ options. */
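+ /* E.g. a PARALLELISM value of 8 set by the build system overrides a "-j4"
+ * given on the command line. */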
+ {
+ LIST *p = L0;
+ p = var_get ("PARALLELISM");
+ if (p)
+ {
+ int j = atoi (p->string);
+ if (j == -1)
+ {
+ printf( "Invalid value of PARALLELISM: %s\n", p->string);
+ }
+ else
+ {
+ globs.jobs = j;
+ }
+ }
+ }
+
+ /* KEEP_GOING overrides -q option. */
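+ /* A non-zero KEEP_GOING forces the build to continue past failures even if
+ * -q was passed; a zero value re-enables quit-on-first-error. */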
+ {
+ LIST *p = L0;
+ p = var_get ("KEEP_GOING");
+ if (p)
+ {
+ int v = atoi (p->string);
+ if (v == 0)
+ globs.quitquick = 1;
+ else
+ globs.quitquick = 0;
+ }
+ }
+
+ /* Now make target. */
+ {
+ PROFILE_ENTER( MAIN_MAKE );
+
+ LIST * targets = targets_to_update();
+ if (targets)
+ {
+ int targets_count = list_length( targets );
+ const char * * targets2 = (const char * *)
+ BJAM_MALLOC( targets_count * sizeof( char * ) );
+ int n = 0;
+ for ( ; targets; targets = list_next( targets ) )
+ targets2[ n++ ] = targets->string;
+ status |= make( targets_count, targets2, anyhow );
+ free( targets2 ); /* release the scratch array allocated above */
+ }
+ else
+ {
+ status = last_update_now_status;
+ }
+
+ PROFILE_EXIT( MAIN_MAKE );
+ }
+
+ PROFILE_EXIT( MAIN );
+ }
+
+ if ( DEBUG_PROFILE )
+ profile_dump();
+
+ /* Widely scattered cleanup. */
+ var_done();
+ file_done();
+ rules_done();
+ stamps_done();
+ str_done();
+
+ /* Close cmdout. */
+ if ( globs.cmdout )
+ fclose( globs.cmdout );
+
+#ifdef HAVE_PYTHON
+ Py_Finalize();
+#endif
+
+ BJAM_MEM_CLOSE();
+
+ return status ? EXITBAD : EXITOK;
+}
+
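+/* executable_path(): best-effort absolute path of the running binary, or NULL
+ * when it cannot be determined. Each platform below uses its native facility;
+ * the fallback trusts argv[0] only when it is already an absolute path. */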
+#if defined(_WIN32)
+#include <windows.h>
+char *executable_path(char *argv0) {
+ char buf[1024];
+ DWORD ret = GetModuleFileName(NULL, buf, sizeof(buf));
+ if (ret == 0 || ret == sizeof(buf)) return NULL;
+ return strdup (buf);
+}
+#elif defined(__APPLE__) /* Not tested */
+#include <mach-o/dyld.h>
+char *executable_path(char *argv0) {
+ char buf[1024];
+ uint32_t size = sizeof(buf);
+ int ret = _NSGetExecutablePath(buf, &size);
+ if (ret != 0) return NULL;
+ return strdup(buf);
+}
+#elif defined(sun) || defined(__sun) /* Not tested */
+#include <stdlib.h>
+
+char *executable_path(char *argv0) {
+ return strdup(getexecname());
+}
+#elif defined(__FreeBSD__)
+#include <sys/sysctl.h>
+char *executable_path(char *argv0) {
+ int mib[4];
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PATHNAME;
+ mib[3] = -1;
+ char buf[1024];
+ size_t size = sizeof(buf);
+ sysctl(mib, 4, buf, &size, NULL, 0);
+ if (size == 0 || size == sizeof(buf)) return NULL;
+ return strndup(buf, size);
+}
+#elif defined(__linux__)
+#include <unistd.h>
+char *executable_path(char *argv0) {
+ char buf[1024];
+ ssize_t ret = readlink("/proc/self/exe", buf, sizeof(buf));
+ if (ret <= 0 || ret == sizeof(buf)) return NULL; /* readlink() returns -1 on error */
+ return strndup(buf, ret);
+}
+#else
+char *executable_path(char *argv0) {
+ /* If argv0 is absolute path, assume it's the right absolute path. */
+ if (argv0[0] == '/')
+ return strdup(argv0);
+ return NULL;
+}
+#endif
diff --git a/jam-files/engine/jam.h b/jam-files/engine/jam.h
new file mode 100644
index 000000000..73a7a04c5
--- /dev/null
+++ b/jam-files/engine/jam.h
@@ -0,0 +1,579 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jam.h - includes and globals for jam
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 04/21/94 (seiwald) - DGUX is __DGUX__, not just __DGUX.
+ * 05/04/94 (seiwald) - new globs.jobs (-j jobs)
+ * 11/01/94 (wingerd) - let us define path of Jambase at compile time.
+ * 12/30/94 (wingerd) - changed command buffer size for NT (MS-DOS shell).
+ * 02/22/95 (seiwald) - Jambase now in /usr/local/lib.
+ * 04/30/95 (seiwald) - FreeBSD added. Live Free or Die.
+ * 05/10/95 (seiwald) - SPLITPATH character set up here.
+ * 08/20/95 (seiwald) - added LINUX.
+ * 08/21/95 (seiwald) - added NCR.
+ * 10/23/95 (seiwald) - added SCO.
+ * 01/03/96 (seiwald) - SINIX (nixdorf) added.
+ * 03/13/96 (seiwald) - Jambase now compiled in; remove JAMBASE variable.
+ * 04/29/96 (seiwald) - AIX now has 31 and 42 OSVERs.
+ * 11/21/96 (peterk) - added BeOS with MW CW mwcc
+ * 12/21/96 (seiwald) - OSPLAT now defined for NT.
+ * 07/19/99 (sickel) - Mac OS X Server and Client support added
+ * 02/18/00 (belmonte)- Support for Cygwin.
+ * 09/12/00 (seiwald) - OSSYMS split to OSMAJOR/OSMINOR/OSPLAT
+ * 12/29/00 (seiwald) - OSVER dropped.
+ */
+
+#ifndef JAM_H_VP_2003_08_01
+#define JAM_H_VP_2003_08_01
+
+#ifdef HAVE_PYTHON
+#include <Python.h>
+#endif
+
+/* Assume popen support is available unless known otherwise. */
+#define HAVE_POPEN 1
+
+/*
+ * VMS, OPENVMS
+ */
+
+#ifdef VMS
+
+#include <types.h>
+#include <file.h>
+#include <stat.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <stdlib.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+#include <unixlib.h>
+
+#define OSMINOR "OS=VMS"
+#define OSMAJOR "VMS=true"
+#define OS_VMS
+#define MAXLINE 1024 /* longest 'together' actions */
+#define SPLITPATH ','
+#define EXITOK 1
+#define EXITBAD 0
+#define DOWNSHIFT_PATHS
+
+/* This may be inaccurate. */
+#ifndef __DECC
+#define OSPLAT "OSPLAT=VAX"
+#endif
+
+#endif
+
+/*
+ * Windows NT
+ */
+
+#ifdef NT
+
+#include <fcntl.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <malloc.h>
+#ifndef __MWERKS__
+ #include <memory.h>
+#endif
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "NT=true"
+#define OSMINOR "OS=NT"
+#define OS_NT
+#define SPLITPATH ';'
+/* Windows NT 3.51 only allows 996 chars per line, but we deal with the problem
+ * in "execnt.c".
+ */
+#define MAXLINE (maxline()) /* longest 'together' actions */
+#define USE_EXECNT
+#define USE_PATHUNIX
+#define PATH_DELIM '\\'
+#define DOWNSHIFT_PATHS
+
+/* AS400 cross-compile from NT. */
+
+#ifdef AS400
+ #undef OSMINOR
+ #undef OSMAJOR
+ #define OSMAJOR "AS400=true"
+ #define OSMINOR "OS=AS400"
+ #define OS_AS400
+#endif
+
+/* Metrowerks Standard Library on Windows. */
+
+#ifdef __MSL__
+ #undef HAVE_POPEN
+#endif
+
+#endif
+
+/*
+ * Windows MingW32
+ */
+
+#ifdef MINGW
+
+#include <fcntl.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <malloc.h>
+#include <memory.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "MINGW=true"
+#define OSMINOR "OS=MINGW"
+#define OS_NT
+#define SPLITPATH ';'
+#define MAXLINE 996 /* longest 'together' actions */
+#define USE_EXECUNIX
+#define USE_PATHUNIX
+#define PATH_DELIM '\\'
+#define DOWNSHIFT_PATHS
+
+#endif
+
+/*
+ * OS2
+ */
+
+#ifdef __OS2__
+
+#include <fcntl.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <malloc.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "OS2=true"
+#define OSMINOR "OS=OS2"
+#define OS_OS2
+#define SPLITPATH ';'
+#define MAXLINE 996 /* longest 'together' actions */
+#define USE_EXECUNIX
+#define USE_PATHUNIX
+#define PATH_DELIM '\\'
+#define DOWNSHIFT_PATHS
+
+#ifdef __EMX__
+ #define USE_FILEUNIX
+#endif
+
+#endif
+
+/*
+ * Macintosh MPW
+ */
+
+#ifdef macintosh
+
+#include <time.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+
+#define OSMAJOR "MAC=true"
+#define OSMINOR "OS=MAC"
+#define OS_MAC
+#define SPLITPATH ','
+
+#endif
+
+/*
+ * God fearing UNIX.
+ */
+
+#ifndef OSMINOR
+
+#define OSMAJOR "UNIX=true"
+#define USE_EXECUNIX
+#define USE_FILEUNIX
+#define USE_PATHUNIX
+#define PATH_DELIM '/'
+
+#ifdef _AIX
+ #define unix
+ #define MAXLINE 23552 /* 24k - 1k, longest 'together' actions */
+ #define OSMINOR "OS=AIX"
+ #define OS_AIX
+ #define NO_VFORK
+#endif
+#ifdef AMIGA
+ #define OSMINOR "OS=AMIGA"
+ #define OS_AMIGA
+#endif
+#ifdef __BEOS__
+ #define unix
+ #define OSMINOR "OS=BEOS"
+ #define OS_BEOS
+ #define NO_VFORK
+#endif
+#ifdef __bsdi__
+ #define OSMINOR "OS=BSDI"
+ #define OS_BSDI
+#endif
+#if defined (COHERENT) && defined (_I386)
+ #define OSMINOR "OS=COHERENT"
+ #define OS_COHERENT
+ #define NO_VFORK
+#endif
+#if defined(__cygwin__) || defined(__CYGWIN__)
+ #define OSMINOR "OS=CYGWIN"
+ #define OS_CYGWIN
+#endif
+#if defined(__FreeBSD__) && !defined(__DragonFly__)
+ #define OSMINOR "OS=FREEBSD"
+ #define OS_FREEBSD
+#endif
+#ifdef __DragonFly__
+ #define OSMINOR "OS=DRAGONFLYBSD"
+ #define OS_DRAGONFLYBSD
+#endif
+#ifdef __DGUX__
+ #define OSMINOR "OS=DGUX"
+ #define OS_DGUX
+#endif
+#ifdef __hpux
+ #define OSMINOR "OS=HPUX"
+ #define OS_HPUX
+#endif
+#ifdef __OPENNT
+ #define unix
+ #define OSMINOR "OS=INTERIX"
+ #define OS_INTERIX
+ #define NO_VFORK
+#endif
+#ifdef __sgi
+ #define OSMINOR "OS=IRIX"
+ #define OS_IRIX
+ #define NO_VFORK
+#endif
+#ifdef __ISC
+ #define OSMINOR "OS=ISC"
+ #define OS_ISC
+ #define NO_VFORK
+#endif
+#ifdef linux
+ #define OSMINOR "OS=LINUX"
+ #define OS_LINUX
+#endif
+#ifdef __Lynx__
+ #define OSMINOR "OS=LYNX"
+ #define OS_LYNX
+ #define NO_VFORK
+ #define unix
+#endif
+#ifdef __MACHTEN__
+ #define OSMINOR "OS=MACHTEN"
+ #define OS_MACHTEN
+#endif
+#ifdef mpeix
+ #define unix
+ #define OSMINOR "OS=MPEIX"
+ #define OS_MPEIX
+ #define NO_VFORK
+#endif
+#ifdef __MVS__
+ #define unix
+ #define OSMINOR "OS=MVS"
+ #define OS_MVS
+#endif
+#ifdef _ATT4
+ #define OSMINOR "OS=NCR"
+ #define OS_NCR
+#endif
+#ifdef __NetBSD__
+ #define unix
+ #define OSMINOR "OS=NETBSD"
+ #define OS_NETBSD
+ #define NO_VFORK
+#endif
+#ifdef __QNX__
+ #define unix
+ #ifdef __QNXNTO__
+ #define OSMINOR "OS=QNXNTO"
+ #define OS_QNXNTO
+ #else
+ #define OSMINOR "OS=QNX"
+ #define OS_QNX
+ #define NO_VFORK
+ #define MAXLINE 996
+ #endif
+#endif
+#ifdef NeXT
+ #ifdef __APPLE__
+ #define OSMINOR "OS=RHAPSODY"
+ #define OS_RHAPSODY
+ #else
+ #define OSMINOR "OS=NEXT"
+ #define OS_NEXT
+ #endif
+#endif
+#ifdef __APPLE__
+ #define unix
+ #define OSMINOR "OS=MACOSX"
+ #define OS_MACOSX
+#endif
+#ifdef __osf__
+ #ifndef unix
+ #define unix
+ #endif
+ #define OSMINOR "OS=OSF"
+ #define OS_OSF
+#endif
+#ifdef _SEQUENT_
+ #define OSMINOR "OS=PTX"
+ #define OS_PTX
+#endif
+#ifdef M_XENIX
+ #define OSMINOR "OS=SCO"
+ #define OS_SCO
+ #define NO_VFORK
+#endif
+#ifdef sinix
+ #define unix
+ #define OSMINOR "OS=SINIX"
+ #define OS_SINIX
+#endif
+#ifdef sun
+ #if defined(__svr4__) || defined(__SVR4)
+ #define OSMINOR "OS=SOLARIS"
+ #define OS_SOLARIS
+ #else
+ #define OSMINOR "OS=SUNOS"
+ #define OS_SUNOS
+ #endif
+#endif
+#ifdef ultrix
+ #define OSMINOR "OS=ULTRIX"
+ #define OS_ULTRIX
+#endif
+#ifdef _UNICOS
+ #define OSMINOR "OS=UNICOS"
+ #define OS_UNICOS
+#endif
+#if defined(__USLC__) && !defined(M_XENIX)
+ #define OSMINOR "OS=UNIXWARE"
+ #define OS_UNIXWARE
+#endif
+#ifdef __OpenBSD__
+ #define OSMINOR "OS=OPENBSD"
+ #define OS_OPENBSD
+ #define unix
+#endif
+#if defined (__FreeBSD_kernel__) && !defined(__FreeBSD__)
+ #define OSMINOR "OS=KFREEBSD"
+ #define OS_KFREEBSD
+#endif
+#ifndef OSMINOR
+ #define OSMINOR "OS=UNKNOWN"
+#endif
+
+/* All the UNIX includes */
+
+#include <sys/types.h>
+#include <sys/stat.h>
+
+#ifndef OS_MPEIX
+ #include <sys/file.h>
+#endif
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+
+#ifndef OS_QNX
+ #include <memory.h>
+#endif
+
+#ifndef OS_ULTRIX
+ #include <stdlib.h>
+#endif
+
+#if !defined( OS_BSDI ) && \
+ !defined( OS_FREEBSD ) && \
+ !defined( OS_DRAGONFLYBSD ) && \
+ !defined( OS_NEXT ) && \
+ !defined( OS_MACHTEN ) && \
+ !defined( OS_MACOSX ) && \
+ !defined( OS_RHAPSODY ) && \
+ !defined( OS_MVS ) && \
+ !defined( OS_OPENBSD )
+ #include <malloc.h>
+#endif
+
+#endif
+
+/*
+ * OSPLAT definitions - suppressed when it is a one-of-a-kind.
+ */
+
+#if defined( _M_PPC ) || \
+ defined( PPC ) || \
+ defined( ppc ) || \
+ defined( __powerpc__ ) || \
+ defined( __ppc__ )
+ #define OSPLAT "OSPLAT=PPC"
+#endif
+
+#if defined( _ALPHA_ ) || \
+ defined( __alpha__ )
+ #define OSPLAT "OSPLAT=AXP"
+#endif
+
+#if defined( _i386_ ) || \
+ defined( __i386__ ) || \
+ defined( __i386 ) || \
+ defined( _M_IX86 )
+ #define OSPLAT "OSPLAT=X86"
+#endif
+
+#if defined( __ia64__ ) || \
+ defined( __IA64__ ) || \
+ defined( __ia64 )
+ #define OSPLAT "OSPLAT=IA64"
+#endif
+
+#if defined( __x86_64__ ) || \
+ defined( __amd64__ ) || \
+ defined( _M_AMD64 )
+ #define OSPLAT "OSPLAT=X86_64"
+#endif
+
+
+#if defined( __sparc__ ) || \
+ defined( __sparc )
+ #define OSPLAT "OSPLAT=SPARC"
+#endif
+
+#ifdef __mips__
+ #define OSPLAT "OSPLAT=MIPS"
+#endif
+
+#ifdef __arm__
+ #define OSPLAT "OSPLAT=ARM"
+#endif
+
+#ifdef __s390__
+ #define OSPLAT "OSPLAT=390"
+#endif
+
+#ifdef __hppa
+ #define OSPLAT "OSPLAT=PARISC"
+#endif
+
+#ifndef OSPLAT
+ #define OSPLAT ""
+#endif
+
+/*
+ * Jam implementation misc.
+ */
+
+#ifndef MAXLINE
+ #define MAXLINE 102400 /* longest 'together' actions */
+#endif
+
+#ifndef EXITOK
+ #define EXITOK 0
+ #define EXITBAD 1
+#endif
+
+#ifndef SPLITPATH
+ #define SPLITPATH ':'
+#endif
+
+/* You probably do not need to muck with these. */
+
+#define MAXSYM 1024 /* longest symbol in the environment */
+#define MAXJPATH 1024 /* longest filename */
+
+#define MAXJOBS 64 /* silently enforced -j limit */
+#define MAXARGC 32 /* words in $(JAMSHELL) */
+
+/* Jam private definitions below. */
+
+#define DEBUG_MAX 14
+
+
+struct globs
+{
+ int noexec;
+ int jobs;
+ int quitquick;
+ int newestfirst; /* build newest sources first */
+ int pipe_action;
+ char debug[ DEBUG_MAX ];
+ FILE * cmdout; /* print cmds, not run them */
+ long timeout; /* number of seconds to limit actions to,
+ * default 0 for no limit.
+ */
+ int dart; /* output build and test results formatted for Dart */
+};
+
+extern struct globs globs;
+
+#define DEBUG_MAKE ( globs.debug[ 1 ] ) /* show actions when executed */
+#define DEBUG_MAKEQ ( globs.debug[ 2 ] ) /* show even quiet actions */
+#define DEBUG_EXEC ( globs.debug[ 2 ] ) /* show text of actions */
+#define DEBUG_MAKEPROG ( globs.debug[ 3 ] ) /* show progress of make0 */
+#define DEBUG_BIND ( globs.debug[ 3 ] ) /* show when files bound */
+
+#define DEBUG_EXECCMD ( globs.debug[ 4 ] ) /* show execcmds()'s work */
+
+#define DEBUG_COMPILE ( globs.debug[ 5 ] ) /* show rule invocations */
+
+#define DEBUG_HEADER ( globs.debug[ 6 ] ) /* show result of header scan */
+#define DEBUG_BINDSCAN ( globs.debug[ 6 ] ) /* show result of dir scan */
+#define DEBUG_SEARCH ( globs.debug[ 6 ] ) /* show attempts at binding */
+
+#define DEBUG_VARSET ( globs.debug[ 7 ] ) /* show variable settings */
+#define DEBUG_VARGET ( globs.debug[ 8 ] ) /* show variable fetches */
+#define DEBUG_VAREXP ( globs.debug[ 8 ] ) /* show variable expansions */
+#define DEBUG_IF ( globs.debug[ 8 ] ) /* show 'if' calculations */
+#define DEBUG_LISTS ( globs.debug[ 9 ] ) /* show list manipulation */
+#define DEBUG_SCAN ( globs.debug[ 9 ] ) /* show scanner tokens */
+#define DEBUG_MEM ( globs.debug[ 9 ] ) /* show memory use */
+
+#define DEBUG_PROFILE ( globs.debug[ 10 ] ) /* dump rule execution times */
+#define DEBUG_PARSE ( globs.debug[ 11 ] ) /* debug parsing */
+#define DEBUG_GRAPH ( globs.debug[ 12 ] ) /* debug dependencies */
+#define DEBUG_FATE ( globs.debug[ 13 ] ) /* show changes to fate in make0() */
+
+/* Everyone gets the memory definitions. */
+#include "mem.h"
+
+/* They also get the profile functions. */
+#include "debug.h"
+
+#endif
diff --git a/jam-files/engine/jambase.c b/jam-files/engine/jambase.c
new file mode 100644
index 000000000..b15282bc3
--- /dev/null
+++ b/jam-files/engine/jambase.c
@@ -0,0 +1,1691 @@
+/* Generated by mkjambase from Jambase */
+char *jambase[] = {
+/* Jambase */
+"if $(NT)\n",
+"{\n",
+"SLASH ?= \\\\ ;\n",
+"}\n",
+"SLASH ?= / ;\n",
+"rule find-to-root ( dir : patterns + )\n",
+"{\n",
+"local globs = [ GLOB $(dir) : $(patterns) ] ;\n",
+"while ! $(globs) && $(dir:P) != $(dir)\n",
+"{\n",
+"dir = $(dir:P) ;\n",
+"globs = [ GLOB $(dir) : $(patterns) ] ;\n",
+"}\n",
+"return $(globs) ;\n",
+"}\n",
+".boost-build-file = ;\n",
+".bootstrap-file = ;\n",
+"BOOST_BUILD_PATH.user-value = $(BOOST_BUILD_PATH) ;\n",
+"if ! $(BOOST_BUILD_PATH) && $(UNIX)\n",
+"{\n",
+"BOOST_BUILD_PATH = /usr/share/boost-build ;\n",
+"}\n",
+"rule _poke ( module-name ? : variables + : value * )\n",
+"{\n",
+"module $(<)\n",
+"{\n",
+"$(>) = $(3) ;\n",
+"}\n",
+"}\n",
+"rule boost-build ( dir ? )\n",
+"{\n",
+"if $(.bootstrap-file)\n",
+"{\n",
+"ECHO \"Error: Illegal attempt to re-bootstrap the build system by invoking\" ;\n",
+"ECHO ;\n",
+"ECHO \" 'boost-build\" $(dir) \";'\" ;\n",
+"ECHO ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"BOOST_BUILD_PATH = $(dir:R=$(.boost-build-file:D)) $(BOOST_BUILD_PATH) ;\n",
+"_poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;\n",
+"local bootstrap-file = [ GLOB $(BOOST_BUILD_PATH) : bootstrap.jam ] ;\n",
+".bootstrap-file = $(bootstrap-file[1]) ;\n",
+"if ! $(.bootstrap-file)\n",
+"{\n",
+"ECHO \"Unable to load Boost.Build: could not find build system.\" ;\n",
+"ECHO --------------------------------------------------------- ;\n",
+"ECHO \"$(.boost-build-file) attempted to load the build system by invoking\" ;\n",
+"ECHO ;\n",
+"ECHO \" 'boost-build\" $(dir) \";'\" ;\n",
+"ECHO ;\n",
+"ECHO \"but we were unable to find \\\"bootstrap.jam\\\" in the specified directory\" ;\n",
+"ECHO \"or in BOOST_BUILD_PATH (searching \"$(BOOST_BUILD_PATH:J=\", \")\").\" ;\n",
+"ECHO ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"if [ MATCH .*(--debug-configuration).* : $(ARGV) ]\n",
+"{\n",
+"ECHO \"notice: loading Boost.Build from\"\n",
+"[ NORMALIZE_PATH $(.bootstrap-file:D) ] ;\n",
+"}\n",
+"include $(.bootstrap-file) ;\n",
+"}\n",
+"if [ MATCH .*(b2).* : $(ARGV[1]:BL) ] \n",
+"|| [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]\n",
+"|| $(BOOST_ROOT) # A temporary measure so Jam works with Boost.Build v1.\n",
+"{\n",
+"local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;\n",
+"local self = [ SELF_PATH ] ;\n",
+"local boost-build-relative = ../../share/boost-build ;\n",
+"local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;\n",
+"local boost-build-files =\n",
+"[ find-to-root [ PWD ] : boost-build.jam ]\n",
+"[ GLOB $(self-based-path) : boost-build.jam ]\n",
+"[ GLOB $(search-path) : boost-build.jam ] ;\n",
+".boost-build-file = $(boost-build-files[1]) ;\n",
+"if ! $(.boost-build-file)\n",
+"{\n",
+"ECHO \"Unable to load Boost.Build: could not find \\\"boost-build.jam\\\"\" ;\n",
+"ECHO --------------------------------------------------------------- ;\n",
+"if ! [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]\n",
+"{\n",
+"ECHO \"BOOST_ROOT must be set, either in the environment, or \" ;\n",
+"ECHO \"on the command-line with -sBOOST_ROOT=..., to the root\" ;\n",
+"ECHO \"of the boost installation.\" ;\n",
+"ECHO ;\n",
+"}\n",
+"ECHO \"Attempted search from\" [ PWD ] \"up to the root\" ;\n",
+"ECHO \"at\" $(self-based-path) ;\n",
+"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"if [ MATCH .*(--debug-configuration).* : $(ARGV) ]\n",
+"{\n",
+"ECHO \"notice: found boost-build.jam at\"\n",
+"[ NORMALIZE_PATH $(.boost-build-file) ] ;\n",
+"}\n",
+"include $(.boost-build-file) ;\n",
+"if ! $(.bootstrap-file)\n",
+"{\n",
+"ECHO \"Unable to load Boost.Build\" ;\n",
+"ECHO -------------------------- ;\n",
+"ECHO \"\\\"$(.boost-build-file)\\\" was found by searching from\" [ PWD ] \"up to the root\" ;\n",
+"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
+"ECHO ;\n",
+"ECHO \"However, it failed to call the \\\"boost-build\\\" rule to indicate\" ;\n",
+"ECHO \"the location of the build system.\" ;\n",
+"ECHO ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"}\n",
+"else\n",
+"{\n",
+"if $(NT)\n",
+"{\n",
+"local SUPPORTED_TOOLSETS = \"BORLANDC\" \"VC7\" \"VISUALC\" \"VISUALC16\" \"INTELC\" \"WATCOM\"\n",
+"\"MINGW\" \"LCC\" ;\n",
+"TOOLSET = \"\" ;\n",
+"if $(JAM_TOOLSET)\n",
+"{\n",
+"local t ;\n",
+"for t in $(SUPPORTED_TOOLSETS)\n",
+"{\n",
+"$(t) = $($(t):J=\" \") ; # reconstitute paths with spaces in them\n",
+"if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }\n",
+"}\n",
+"if ! $(TOOLSET)\n",
+"{\n",
+"ECHO \"The JAM_TOOLSET environment variable is defined but its value\" ;\n",
+"ECHO \"is invalid, please use one of the following:\" ;\n",
+"ECHO ;\n",
+"for t in $(SUPPORTED_TOOLSETS) { ECHO \" \" $(t) ; }\n",
+"EXIT ;\n",
+"}\n",
+"}\n",
+"if ! $(TOOLSET)\n",
+"{\n",
+"if $(BCCROOT)\n",
+"{\n",
+"TOOLSET = BORLANDC ;\n",
+"BORLANDC = $(BCCROOT:J=\" \") ;\n",
+"}\n",
+"else if $(MSVC)\n",
+"{\n",
+"TOOLSET = VISUALC16 ;\n",
+"VISUALC16 = $(MSVC:J=\" \") ;\n",
+"}\n",
+"else if $(MSVCNT)\n",
+"{\n",
+"TOOLSET = VISUALC ;\n",
+"VISUALC = $(MSVCNT:J=\" \") ;\n",
+"}\n",
+"else if $(MSVCDir)\n",
+"{\n",
+"TOOLSET = VISUALC ;\n",
+"VISUALC = $(MSVCDir:J=\" \") ;\n",
+"}\n",
+"else if $(MINGW)\n",
+"{\n",
+"TOOLSET = MINGW ;\n",
+"}\n",
+"else\n",
+"{\n",
+"ECHO \"Jam cannot be run because, either:\" ;\n",
+"ECHO \" a. You didn't set BOOST_ROOT to indicate the root of your\" ;\n",
+"ECHO \" Boost installation.\" ;\n",
+"ECHO \" b. You are trying to use stock Jam but didn't indicate which\" ;\n",
+"ECHO \" compilation toolset to use. To do so, follow these simple\" ;\n",
+"ECHO \" instructions:\" ;\n",
+"ECHO ;\n",
+"ECHO \" - define one of the following environment variable, with the\" ;\n",
+"ECHO \" appropriate value according to this list:\" ;\n",
+"ECHO ;\n",
+"ECHO \" Variable Toolset Description\" ;\n",
+"ECHO ;\n",
+"ECHO \" BORLANDC Borland C++ BC++ install path\" ;\n",
+"ECHO \" VISUALC Microsoft Visual C++ VC++ install path\" ;\n",
+"ECHO \" VISUALC16 Microsoft Visual C++ 16 bit VC++ 16 bit install\" ;\n",
+"ECHO \" INTELC Intel C/C++ IC++ install path\" ;\n",
+"ECHO \" WATCOM Watcom C/C++ Watcom install path\" ;\n",
+"ECHO \" MINGW MinGW (gcc) MinGW install path\" ;\n",
+"ECHO \" LCC Win32-LCC LCC-Win32 install path\" ;\n",
+"ECHO ;\n",
+"ECHO \" - define the JAM_TOOLSET environment variable with the *name*\" ;\n",
+"ECHO \" of the toolset variable you want to use.\" ;\n",
+"ECHO ;\n",
+"ECHO \" e.g.: set VISUALC=C:\\\\Visual6\" ;\n",
+"ECHO \" set JAM_TOOLSET=VISUALC\" ;\n",
+"EXIT ;\n",
+"}\n",
+"}\n",
+"CP ?= copy ;\n",
+"RM ?= del /f/q ;\n",
+"SLASH ?= \\\\ ;\n",
+"SUFLIB ?= .lib ;\n",
+"SUFOBJ ?= .obj ;\n",
+"SUFEXE ?= .exe ;\n",
+"if $(TOOLSET) = BORLANDC\n",
+"{\n",
+"ECHO \"Compiler is Borland C++\" ;\n",
+"AR ?= tlib /C /P64 ;\n",
+"CC ?= bcc32 ;\n",
+"CCFLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus ;\n",
+"C++ ?= bcc32 ;\n",
+"C++FLAGS ?= -q -y -d -v -w-par -w-ccc -w-rch -w-pro -w-aus -P ;\n",
+"LINK ?= $(CC) ;\n",
+"LINKFLAGS ?= $(CCFLAGS) ;\n",
+"STDLIBPATH ?= $(BORLANDC)\\\\lib ;\n",
+"STDHDRS ?= $(BORLANDC)\\\\include ;\n",
+"NOARSCAN ?= true ;\n",
+"}\n",
+"else if $(TOOLSET) = VISUALC16\n",
+"{\n",
+"ECHO \"Compiler is Microsoft Visual C++ 16 bit\" ;\n",
+"AR ?= lib /nologo ;\n",
+"CC ?= cl /nologo ;\n",
+"CCFLAGS ?= /D \\\"WIN\\\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= $(CC) ;\n",
+"LINKFLAGS ?= $(CCFLAGS) ;\n",
+"LINKLIBS ?=\n",
+"\\\"$(VISUALC16)\\\\lib\\\\mlibce.lib\\\"\n",
+"\\\"$(VISUALC16)\\\\lib\\\\oldnames.lib\\\"\n",
+";\n",
+"LINKLIBS ?= ;\n",
+"NOARSCAN ?= true ;\n",
+"OPTIM ?= \"\" ;\n",
+"STDHDRS ?= $(VISUALC16)\\\\include ;\n",
+"UNDEFFLAG ?= \"/u _\" ;\n",
+"}\n",
+"else if $(TOOLSET) = VISUALC\n",
+"{\n",
+"ECHO \"Compiler is Microsoft Visual C++\" ;\n",
+"AR ?= lib ;\n",
+"AS ?= masm386 ;\n",
+"CC ?= cl /nologo ;\n",
+"CCFLAGS ?= \"\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= link /nologo ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= \\\"$(VISUALC)\\\\lib\\\\advapi32.lib\\\"\n",
+"\\\"$(VISUALC)\\\\lib\\\\gdi32.lib\\\"\n",
+"\\\"$(VISUALC)\\\\lib\\\\user32.lib\\\"\n",
+"\\\"$(VISUALC)\\\\lib\\\\kernel32.lib\\\" ;\n",
+"OPTIM ?= \"\" ;\n",
+"STDHDRS ?= $(VISUALC)\\\\include ;\n",
+"UNDEFFLAG ?= \"/u _\" ;\n",
+"}\n",
+"else if $(TOOLSET) = VC7\n",
+"{\n",
+"ECHO \"Compiler is Microsoft Visual C++ .NET\" ;\n",
+"AR ?= lib ;\n",
+"AS ?= masm386 ;\n",
+"CC ?= cl /nologo ;\n",
+"CCFLAGS ?= \"\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= link /nologo ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= \\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\advapi32.lib\\\"\n",
+"\\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\gdi32.lib\\\"\n",
+"\\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\user32.lib\\\"\n",
+"\\\"$(VISUALC)\\\\PlatformSDK\\\\lib\\\\kernel32.lib\\\" ;\n",
+"OPTIM ?= \"\" ;\n",
+"STDHDRS ?= \\\"$(VISUALC)\\\\include\\\"\n",
+"\\\"$(VISUALC)\\\\PlatformSDK\\\\include\\\" ;\n",
+"UNDEFFLAG ?= \"/u _\" ;\n",
+"}\n",
+"else if $(TOOLSET) = INTELC\n",
+"{\n",
+"ECHO \"Compiler is Intel C/C++\" ;\n",
+"if ! $(VISUALC)\n",
+"{\n",
+"ECHO \"As a special exception, when using the Intel C++ compiler, you need\" ;\n",
+"ECHO \"to define the VISUALC environment variable to indicate the location\" ;\n",
+"ECHO \"of your Visual C++ installation. Aborting..\" ;\n",
+"EXIT ;\n",
+"}\n",
+"AR ?= lib ;\n",
+"AS ?= masm386 ;\n",
+"CC ?= icl /nologo ;\n",
+"CCFLAGS ?= \"\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= link /nologo ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= $(VISUALC)\\\\lib\\\\advapi32.lib\n",
+"$(VISUALC)\\\\lib\\\\kernel32.lib\n",
+";\n",
+"OPTIM ?= \"\" ;\n",
+"STDHDRS ?= $(INTELC)\\include $(VISUALC)\\\\include ;\n",
+"UNDEFFLAG ?= \"/u _\" ;\n",
+"}\n",
+"else if $(TOOLSET) = WATCOM\n",
+"{\n",
+"ECHO \"Compiler is Watcom C/C++\" ;\n",
+"AR ?= wlib ;\n",
+"CC ?= wcc386 ;\n",
+"CCFLAGS ?= /zq /DWIN32 /I$(WATCOM)\\\\h ; # zq=quiet\n",
+"C++ ?= wpp386 ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"CP ?= copy ;\n",
+"DOT ?= . ;\n",
+"DOTDOT ?= .. ;\n",
+"LINK ?= wcl386 ;\n",
+"LINKFLAGS ?= /zq ; # zq=quiet\n",
+"LINKLIBS ?= ;\n",
+"MV ?= move ;\n",
+"NOARSCAN ?= true ;\n",
+"OPTIM ?= ;\n",
+"RM ?= del /f ;\n",
+"SLASH ?= \\\\ ;\n",
+"STDHDRS ?= $(WATCOM)\\\\h $(WATCOM)\\\\h\\\\nt ;\n",
+"SUFEXE ?= .exe ;\n",
+"SUFLIB ?= .lib ;\n",
+"SUFOBJ ?= .obj ;\n",
+"UNDEFFLAG ?= \"/u _\" ;\n",
+"}\n",
+"else if $(TOOLSET) = MINGW\n",
+"{\n",
+"ECHO \"Compiler is GCC with Mingw\" ;\n",
+"AR ?= ar -ru ;\n",
+"CC ?= gcc ;\n",
+"CCFLAGS ?= \"\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= $(CC) ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= \"\" ;\n",
+"OPTIM ?= ;\n",
+"SUFOBJ = .o ;\n",
+"SUFLIB = .a ;\n",
+"SLASH = / ;\n",
+"}\n",
+"else if $(TOOLSET) = LCC\n",
+"{\n",
+"ECHO \"Compiler is Win32-LCC\" ;\n",
+"AR ?= lcclib ;\n",
+"CC ?= lcc ;\n",
+"CCFLAGS ?= \"\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= lcclnk ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= \"\" ;\n",
+"OPTIM ?= ;\n",
+"NOARSCAN = true ;\n",
+"}\n",
+"else\n",
+"{\n",
+"EXIT On NT, set BCCROOT, MSVCNT, MINGW or MSVC to the root of the\n",
+"Borland or Microsoft directories. ;\n",
+"}\n",
+"}\n",
+"else if $(OS2)\n",
+"{\n",
+"local SUPPORTED_TOOLSETS = \"EMX\" \"WATCOM\" ;\n",
+"TOOLSET = \"\" ;\n",
+"if $(JAM_TOOLSET)\n",
+"{\n",
+"local t ;\n",
+"for t in $(SUPPORTED_TOOLSETS)\n",
+"{\n",
+"$(t) = $($(t):J=\" \") ; # reconstitute paths with spaces in them\n",
+"if $(t) = $(JAM_TOOLSET) { TOOLSET = $(t) ; }\n",
+"}\n",
+"if ! $(TOOLSET)\n",
+"{\n",
+"ECHO \"The JAM_TOOLSET environment variable is defined but its value\" ;\n",
+"ECHO \"is invalid, please use one of the following:\" ;\n",
+"ECHO ;\n",
+"for t in $(SUPPORTED_TOOLSETS) { ECHO \" \" $(t) ; }\n",
+"EXIT ;\n",
+"}\n",
+"}\n",
+"if ! $(TOOLSET)\n",
+"{\n",
+"if $(watcom)\n",
+"{\n",
+"WATCOM = $(watcom:J=\" \") ;\n",
+"TOOLSET = WATCOM ;\n",
+"}\n",
+"else\n",
+"{\n",
+"ECHO \"Jam cannot be run because you didn't indicate which compilation toolset\" ;\n",
+"ECHO \"to use. To do so, follow these simple instructions:\" ;\n",
+"ECHO ;\n",
+"ECHO \" - define one of the following environment variable, with the\" ;\n",
+"ECHO \" appropriate value according to this list:\" ;\n",
+"ECHO ;\n",
+"ECHO \" Variable Toolset Description\" ;\n",
+"ECHO ;\n",
+"ECHO \" WATCOM Watcom C/C++ Watcom install path\" ;\n",
+"ECHO \" EMX EMX (gcc) EMX install path\" ;\n",
+"ECHO \" VISUALAGE IBM Visual Age C/C++ VisualAge install path\" ;\n",
+"ECHO ;\n",
+"ECHO \" - define the JAM_TOOLSET environment variable with the *name*\" ;\n",
+"ECHO \" of the toolset variable you want to use.\" ;\n",
+"ECHO ;\n",
+"ECHO \" e.g.: set WATCOM=C:\\WATCOM\" ;\n",
+"ECHO \" set JAM_TOOLSET=WATCOM\" ;\n",
+"ECHO ;\n",
+"EXIT ;\n",
+"}\n",
+"}\n",
+"RM = del /f ;\n",
+"CP = copy ;\n",
+"MV ?= move ;\n",
+"DOT ?= . ;\n",
+"DOTDOT ?= .. ;\n",
+"SUFLIB ?= .lib ;\n",
+"SUFOBJ ?= .obj ;\n",
+"SUFEXE ?= .exe ;\n",
+"if $(TOOLSET) = WATCOM\n",
+"{\n",
+"AR ?= wlib ;\n",
+"BINDIR ?= \\\\os2\\\\apps ;\n",
+"CC ?= wcc386 ;\n",
+"CCFLAGS ?= /zq /DOS2 /I$(WATCOM)\\\\h ; # zq=quiet\n",
+"C++ ?= wpp386 ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= wcl386 ;\n",
+"LINKFLAGS ?= /zq ; # zq=quiet\n",
+"LINKLIBS ?= ;\n",
+"NOARSCAN ?= true ;\n",
+"OPTIM ?= ;\n",
+"SLASH ?= \\\\ ;\n",
+"STDHDRS ?= $(WATCOM)\\\\h ;\n",
+"UNDEFFLAG ?= \"/u _\" ;\n",
+"}\n",
+"else if $(TOOLSET) = EMX\n",
+"{\n",
+"ECHO \"Compiler is GCC-EMX\" ;\n",
+"AR ?= ar -ru ;\n",
+"CC ?= gcc ;\n",
+"CCFLAGS ?= \"\" ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"LINK ?= $(CC) ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= \"\" ;\n",
+"OPTIM ?= ;\n",
+"SUFOBJ = .o ;\n",
+"SUFLIB = .a ;\n",
+"UNDEFFLAG ?= \"-U\" ;\n",
+"SLASH = / ;\n",
+"}\n",
+"else\n",
+"{\n",
+"EXIT \"Sorry, but the $(JAM_TOOLSET) toolset isn't supported for now\" ;\n",
+"}\n",
+"}\n",
+"else if $(VMS)\n",
+"{\n",
+"C++ ?= cxx ;\n",
+"C++FLAGS ?= ;\n",
+"CC ?= cc ;\n",
+"CCFLAGS ?= ;\n",
+"CHMOD ?= set file/prot= ;\n",
+"CP ?= copy/replace ;\n",
+"CRELIB ?= true ;\n",
+"DOT ?= [] ;\n",
+"DOTDOT ?= [-] ;\n",
+"EXEMODE ?= (w:e) ;\n",
+"FILEMODE ?= (w:r) ;\n",
+"HDRS ?= ;\n",
+"LINK ?= link ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"LINKLIBS ?= ;\n",
+"MKDIR ?= create/dir ;\n",
+"MV ?= rename ;\n",
+"OPTIM ?= \"\" ;\n",
+"RM ?= delete ;\n",
+"RUNVMS ?= mcr ;\n",
+"SHELLMODE ?= (w:er) ;\n",
+"SLASH ?= . ;\n",
+"STDHDRS ?= decc$library_include ;\n",
+"SUFEXE ?= .exe ;\n",
+"SUFLIB ?= .olb ;\n",
+"SUFOBJ ?= .obj ;\n",
+"switch $(OS)\n",
+"{\n",
+"case OPENVMS : CCFLAGS ?= /stand=vaxc ;\n",
+"case VMS : LINKLIBS ?= sys$library:vaxcrtl.olb/lib ;\n",
+"}\n",
+"}\n",
+"else if $(MAC)\n",
+"{\n",
+"local OPT ;\n",
+"CW ?= \"{CW}\" ;\n",
+"MACHDRS ?=\n",
+"\"$(UMACHDRS):Universal:Interfaces:CIncludes\"\n",
+"\"$(CW):MSL:MSL_C:MSL_Common:Include\"\n",
+"\"$(CW):MSL:MSL_C:MSL_MacOS:Include\" ;\n",
+"MACLIBS ?=\n",
+"\"$(CW):MacOS Support:Universal:Libraries:StubLibraries:Interfacelib\"\n",
+"\"$(CW):MacOS Support:Universal:Libraries:StubLibraries:Mathlib\" ;\n",
+"MPWLIBS ?=\n",
+"\"$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib\"\n",
+"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW.Lib\" ;\n",
+"MPWNLLIBS ?=\n",
+"\"$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL MPWCRuntime.lib\"\n",
+"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC MPW(NL).Lib\" ;\n",
+"SIOUXHDRS ?= ;\n",
+"SIOUXLIBS ?=\n",
+"\"$(CW):MacOS Support:Libraries:Runtime:Runtime PPC:MSL RuntimePPC.lib\"\n",
+"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL SIOUX.PPC.Lib\"\n",
+"\"$(CW):MSL:MSL_C:MSL_MacOS:Lib:PPC:MSL C.PPC.Lib\" ;\n",
+"C++ ?= mwcppc ;\n",
+"C++FLAGS ?= -w off -nomapcr ;\n",
+"CC ?= mwcppc ;\n",
+"CCFLAGS ?= -w off -nomapcr ;\n",
+"CP ?= duplicate -y ;\n",
+"DOT ?= \":\" ;\n",
+"DOTDOT ?= \"::\" ;\n",
+"HDRS ?= $(MACHDRS) $(MPWHDRS) ;\n",
+"LINK ?= mwlinkppc ;\n",
+"LINKFLAGS ?= -mpwtool -warn ;\n",
+"LINKLIBS ?= $(MACLIBS) $(MPWLIBS) ;\n",
+"MKDIR ?= newfolder ;\n",
+"MV ?= rename -y ;\n",
+"NOARSCAN ?= true ;\n",
+"OPTIM ?= ;\n",
+"RM ?= delete -y ;\n",
+"SLASH ?= \":\" ;\n",
+"STDHDRS ?= ;\n",
+"SUFLIB ?= .lib ;\n",
+"SUFOBJ ?= .o ;\n",
+"}\n",
+"else if $(OS) = BEOS && $(METROWERKS)\n",
+"{\n",
+"AR ?= mwld -xml -o ;\n",
+"BINDIR ?= /boot/apps ;\n",
+"CC ?= mwcc ;\n",
+"CCFLAGS ?= -nosyspath ;\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= -nosyspath ;\n",
+"FORTRAN ?= \"\" ;\n",
+"LIBDIR ?= /boot/develop/libraries ;\n",
+"LINK ?= mwld ;\n",
+"LINKFLAGS ?= \"\" ;\n",
+"MANDIR ?= /boot/documentation/\"Shell Tools\"/HTML ;\n",
+"NOARSCAN ?= true ;\n",
+"STDHDRS ?= /boot/develop/headers/posix ;\n",
+"}\n",
+"else if $(OS) = BEOS\n",
+"{\n",
+"BINDIR ?= /boot/apps ;\n",
+"CC ?= gcc ;\n",
+"C++ ?= $(CC) ;\n",
+"FORTRAN ?= \"\" ;\n",
+"LIBDIR ?= /boot/develop/libraries ;\n",
+"LINK ?= gcc ;\n",
+"LINKLIBS ?= -lnet ;\n",
+"NOARSCAN ?= true ;\n",
+"STDHDRS ?= /boot/develop/headers/posix ;\n",
+"}\n",
+"else if $(UNIX)\n",
+"{\n",
+"switch $(OS)\n",
+"{\n",
+"case AIX :\n",
+"LINKLIBS ?= -lbsd ;\n",
+"case AMIGA :\n",
+"CC ?= gcc ;\n",
+"YACC ?= \"bison -y\" ;\n",
+"case CYGWIN :\n",
+"CC ?= gcc ;\n",
+"CCFLAGS += -D__cygwin__ ;\n",
+"LEX ?= flex ;\n",
+"RANLIB ?= \"\" ;\n",
+"SUFEXE ?= .exe ;\n",
+"YACC ?= \"bison -y\" ;\n",
+"case DGUX :\n",
+"RANLIB ?= \"\" ;\n",
+"RELOCATE ?= true ;\n",
+"case HPUX :\n",
+"YACC = ;\n",
+"CFLAGS += -Ae ;\n",
+"CCFLAGS += -Ae ;\n",
+"RANLIB ?= \"\" ;\n",
+"case INTERIX :\n",
+"CC ?= gcc ;\n",
+"RANLIB ?= \"\" ;\n",
+"case IRIX :\n",
+"RANLIB ?= \"\" ;\n",
+"case MPEIX :\n",
+"CC ?= gcc ;\n",
+"C++ ?= gcc ;\n",
+"CCFLAGS += -D_POSIX_SOURCE ;\n",
+"HDRS += /usr/include ;\n",
+"RANLIB ?= \"\" ;\n",
+"NOARSCAN ?= true ;\n",
+"NOARUPDATE ?= true ;\n",
+"case MVS :\n",
+"RANLIB ?= \"\" ;\n",
+"case NEXT :\n",
+"AR ?= libtool -o ;\n",
+"RANLIB ?= \"\" ;\n",
+"case MACOSX :\n",
+"AR ?= libtool -o ;\n",
+"C++ ?= c++ ;\n",
+"MANDIR ?= /usr/local/share/man ;\n",
+"RANLIB ?= \"\" ;\n",
+"case NCR :\n",
+"RANLIB ?= \"\" ;\n",
+"case PTX :\n",
+"RANLIB ?= \"\" ;\n",
+"case QNX :\n",
+"AR ?= wlib ;\n",
+"CC ?= cc ;\n",
+"CCFLAGS ?= -Q ; # quiet\n",
+"C++ ?= $(CC) ;\n",
+"C++FLAGS ?= -Q ; # quiet\n",
+"LINK ?= $(CC) ;\n",
+"LINKFLAGS ?= -Q ; # quiet\n",
+"NOARSCAN ?= true ;\n",
+"RANLIB ?= \"\" ;\n",
+"case SCO :\n",
+"RANLIB ?= \"\" ;\n",
+"RELOCATE ?= true ;\n",
+"case SINIX :\n",
+"RANLIB ?= \"\" ;\n",
+"case SOLARIS :\n",
+"RANLIB ?= \"\" ;\n",
+"AR ?= \"/usr/ccs/bin/ar ru\" ;\n",
+"case UNICOS :\n",
+"NOARSCAN ?= true ;\n",
+"OPTIM ?= -O0 ;\n",
+"case UNIXWARE :\n",
+"RANLIB ?= \"\" ;\n",
+"RELOCATE ?= true ;\n",
+"}\n",
+"CCFLAGS ?= ;\n",
+"C++FLAGS ?= $(CCFLAGS) ;\n",
+"CHMOD ?= chmod ;\n",
+"CHGRP ?= chgrp ;\n",
+"CHOWN ?= chown ;\n",
+"LEX ?= lex ;\n",
+"LINKFLAGS ?= $(CCFLAGS) ;\n",
+"LINKLIBS ?= ;\n",
+"OPTIM ?= -O ;\n",
+"RANLIB ?= ranlib ;\n",
+"YACC ?= yacc ;\n",
+"YACCFILES ?= y.tab ;\n",
+"YACCFLAGS ?= -d ;\n",
+"}\n",
+"AR ?= ar ru ;\n",
+"AS ?= as ;\n",
+"ASFLAGS ?= ;\n",
+"AWK ?= awk ;\n",
+"BINDIR ?= /usr/local/bin ;\n",
+"C++ ?= cc ;\n",
+"C++FLAGS ?= ;\n",
+"CC ?= cc ;\n",
+"CCFLAGS ?= ;\n",
+"CP ?= cp -f ;\n",
+"CRELIB ?= ;\n",
+"DOT ?= . ;\n",
+"DOTDOT ?= .. ;\n",
+"EXEMODE ?= 711 ;\n",
+"FILEMODE ?= 644 ;\n",
+"FORTRAN ?= f77 ;\n",
+"FORTRANFLAGS ?= ;\n",
+"HDRS ?= ;\n",
+"INSTALLGRIST ?= installed ;\n",
+"JAMFILE ?= Jamfile ;\n",
+"JAMRULES ?= Jamrules ;\n",
+"LEX ?= ;\n",
+"LIBDIR ?= /usr/local/lib ;\n",
+"LINK ?= $(CC) ;\n",
+"LINKFLAGS ?= ;\n",
+"LINKLIBS ?= ;\n",
+"LN ?= ln ;\n",
+"MANDIR ?= /usr/local/man ;\n",
+"MKDIR ?= mkdir ;\n",
+"MV ?= mv -f ;\n",
+"OPTIM ?= ;\n",
+"RCP ?= rcp ;\n",
+"RM ?= rm -f ;\n",
+"RSH ?= rsh ;\n",
+"SED ?= sed ;\n",
+"SHELLHEADER ?= \"#!/bin/sh\" ;\n",
+"SHELLMODE ?= 755 ;\n",
+"SLASH ?= / ;\n",
+"STDHDRS ?= /usr/include ;\n",
+"SUFEXE ?= \"\" ;\n",
+"SUFLIB ?= .a ;\n",
+"SUFOBJ ?= .o ;\n",
+"UNDEFFLAG ?= \"-u _\" ;\n",
+"YACC ?= ;\n",
+"YACCFILES ?= ;\n",
+"YACCFLAGS ?= ;\n",
+"HDRPATTERN =\n",
+"\"^[ ]*#[ ]*include[ ]*[<\\\"]([^\\\">]*)[\\\">].*$\" ;\n",
+"OSFULL = $(OS)$(OSVER)$(OSPLAT) $(OS)$(OSPLAT) $(OS)$(OSVER) $(OS) ;\n",
+"DEPENDS all : shell files lib exe obj ;\n",
+"DEPENDS all shell files lib exe obj : first ;\n",
+"NOTFILE all first shell files lib exe obj dirs clean uninstall ;\n",
+"ALWAYS clean uninstall ;\n",
+"rule As\n",
+"{\n",
+"DEPENDS $(<) : $(>) ;\n",
+"ASFLAGS on $(<) += $(ASFLAGS) $(SUBDIRASFLAGS) ;\n",
+"}\n",
+"rule Bulk\n",
+"{\n",
+"local i ;\n",
+"for i in $(>)\n",
+"{\n",
+"File $(i:D=$(<)) : $(i) ;\n",
+"}\n",
+"}\n",
+"rule Cc\n",
+"{\n",
+"local _h ;\n",
+"DEPENDS $(<) : $(>) ;\n",
+"CCFLAGS on $(<) += $(CCFLAGS) $(SUBDIRCCFLAGS) ;\n",
+"if $(RELOCATE)\n",
+"{\n",
+"CcMv $(<) : $(>) ;\n",
+"}\n",
+"_h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n",
+"if $(VMS) && $(_h)\n",
+"{\n",
+"SLASHINC on $(<) = \"/inc=(\" $(_h[1]) ,$(_h[2-]) \")\" ;\n",
+"}\n",
+"else if $(MAC) && $(_h)\n",
+"{\n",
+"local _i _j ;\n",
+"_j = $(_h[1]) ;\n",
+"for _i in $(_h[2-])\n",
+"{\n",
+"_j = $(_j),$(_i) ;\n",
+"}\n",
+"MACINC on $(<) = \\\"$(_j)\\\" ;\n",
+"}\n",
+"}\n",
+"rule C++\n",
+"{\n",
+"local _h ;\n",
+"DEPENDS $(<) : $(>) ;\n",
+"C++FLAGS on $(<) += $(C++FLAGS) $(SUBDIRC++FLAGS) ;\n",
+"if $(RELOCATE)\n",
+"{\n",
+"CcMv $(<) : $(>) ;\n",
+"}\n",
+"_h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n",
+"if $(VMS) && $(_h)\n",
+"{\n",
+"SLASHINC on $(<) = \"/inc=(\" $(_h[1]) ,$(_h[2-]) \")\" ;\n",
+"}\n",
+"else if $(MAC) && $(_h)\n",
+"{\n",
+"local _i _j ;\n",
+"_j = $(_h[1]) ;\n",
+"for _i in $(_h[2-])\n",
+"{\n",
+"_j = $(_j),$(_i) ;\n",
+"}\n",
+"MACINC on $(<) = \\\"$(_j)\\\" ;\n",
+"}\n",
+"}\n",
+"rule Chmod\n",
+"{\n",
+"if $(CHMOD) { Chmod1 $(<) ; }\n",
+"}\n",
+"rule File\n",
+"{\n",
+"DEPENDS files : $(<) ;\n",
+"DEPENDS $(<) : $(>) ;\n",
+"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
+"MODE on $(<) = $(FILEMODE) ;\n",
+"Chmod $(<) ;\n",
+"}\n",
+"rule Fortran\n",
+"{\n",
+"DEPENDS $(<) : $(>) ;\n",
+"}\n",
+"rule GenFile\n",
+"{\n",
+"local _t = [ FGristSourceFiles $(<) ] ;\n",
+"local _s = [ FAppendSuffix $(>[1]) : $(SUFEXE) ] ;\n",
+"Depends $(_t) : $(_s) $(>[2-]) ;\n",
+"GenFile1 $(_t) : $(_s) $(>[2-]) ;\n",
+"Clean clean : $(_t) ;\n",
+"}\n",
+"rule GenFile1\n",
+"{\n",
+"MakeLocate $(<) : $(LOCATE_SOURCE) ;\n",
+"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
+"}\n",
+"rule HardLink\n",
+"{\n",
+"DEPENDS files : $(<) ;\n",
+"DEPENDS $(<) : $(>) ;\n",
+"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
+"}\n",
+"rule HdrMacroFile\n",
+"{\n",
+"HDRMACRO $(<) ;\n",
+"}\n",
+"rule HdrRule\n",
+"{\n",
+"local s ;\n",
+"if $(HDRGRIST)\n",
+"{\n",
+"s = $(>:G=$(HDRGRIST)) ;\n",
+"} else {\n",
+"s = $(>) ;\n",
+"}\n",
+"INCLUDES $(<) : $(s) ;\n",
+"SEARCH on $(s) = $(HDRSEARCH) ;\n",
+"NOCARE $(s) ;\n",
+"HDRSEARCH on $(s) = $(HDRSEARCH) ;\n",
+"HDRSCAN on $(s) = $(HDRSCAN) ;\n",
+"HDRRULE on $(s) = $(HDRRULE) ;\n",
+"HDRGRIST on $(s) = $(HDRGRIST) ;\n",
+"}\n",
+"rule InstallInto\n",
+"{\n",
+"local i t ;\n",
+"t = $(>:G=$(INSTALLGRIST)) ;\n",
+"Depends install : $(t) ;\n",
+"Clean uninstall : $(t) ;\n",
+"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
+"MakeLocate $(t) : $(<) ;\n",
+"for i in $(>)\n",
+"{\n",
+"local tt = $(i:G=$(INSTALLGRIST)) ;\n",
+"Depends $(tt) : $(i) ;\n",
+"Install $(tt) : $(i) ;\n",
+"Chmod $(tt) ;\n",
+"if $(OWNER) && $(CHOWN)\n",
+"{\n",
+"Chown $(tt) ;\n",
+"OWNER on $(tt) = $(OWNER) ;\n",
+"}\n",
+"if $(GROUP) && $(CHGRP)\n",
+"{\n",
+"Chgrp $(tt) ;\n",
+"GROUP on $(tt) = $(GROUP) ;\n",
+"}\n",
+"}\n",
+"}\n",
+"rule InstallBin\n",
+"{\n",
+"local _t = [ FAppendSuffix $(>) : $(SUFEXE) ] ;\n",
+"InstallInto $(<) : $(_t) ;\n",
+"MODE on $(_t:G=installed) = $(EXEMODE) ;\n",
+"}\n",
+"rule InstallFile\n",
+"{\n",
+"InstallInto $(<) : $(>) ;\n",
+"MODE on $(>:G=installed) = $(FILEMODE) ;\n",
+"}\n",
+"rule InstallLib\n",
+"{\n",
+"InstallInto $(<) : $(>) ;\n",
+"MODE on $(>:G=installed) = $(FILEMODE) ;\n",
+"}\n",
+"rule InstallMan\n",
+"{\n",
+"local i s d ;\n",
+"for i in $(>)\n",
+"{\n",
+"switch $(i:S)\n",
+"{\n",
+"case .1 : s = 1 ; case .2 : s = 2 ; case .3 : s = 3 ;\n",
+"case .4 : s = 4 ; case .5 : s = 5 ; case .6 : s = 6 ;\n",
+"case .7 : s = 7 ; case .8 : s = 8 ; case .l : s = l ;\n",
+"case .n : s = n ; case .man : s = 1 ;\n",
+"}\n",
+"d = man$(s) ;\n",
+"InstallInto $(d:R=$(<)) : $(i) ;\n",
+"}\n",
+"MODE on $(>:G=installed) = $(FILEMODE) ;\n",
+"}\n",
+"rule InstallShell\n",
+"{\n",
+"InstallInto $(<) : $(>) ;\n",
+"MODE on $(>:G=installed) = $(SHELLMODE) ;\n",
+"}\n",
+"rule Lex\n",
+"{\n",
+"LexMv $(<) : $(>) ;\n",
+"DEPENDS $(<) : $(>) ;\n",
+"MakeLocate $(<) : $(LOCATE_SOURCE) ;\n",
+"Clean clean : $(<) ;\n",
+"}\n",
+"rule Library\n",
+"{\n",
+"LibraryFromObjects $(<) : $(>:S=$(SUFOBJ)) ;\n",
+"Objects $(>) ;\n",
+"}\n",
+"rule LibraryFromObjects\n",
+"{\n",
+"local _i _l _s ;\n",
+"_s = [ FGristFiles $(>) ] ;\n",
+"_l = $(<:S=$(SUFLIB)) ;\n",
+"if $(KEEPOBJS)\n",
+"{\n",
+"DEPENDS obj : $(_s) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"DEPENDS lib : $(_l) ;\n",
+"}\n",
+"if ! $(_l:D)\n",
+"{\n",
+"MakeLocate $(_l) $(_l)($(_s:BS)) : $(LOCATE_TARGET) ;\n",
+"}\n",
+"if $(NOARSCAN)\n",
+"{\n",
+"DEPENDS $(_l) : $(_s) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"DEPENDS $(_l) : $(_l)($(_s:BS)) ;\n",
+"for _i in $(_s)\n",
+"{\n",
+"DEPENDS $(_l)($(_i:BS)) : $(_i) ;\n",
+"}\n",
+"}\n",
+"Clean clean : $(_l) ;\n",
+"if $(CRELIB) { CreLib $(_l) : $(_s[1]) ; }\n",
+"Archive $(_l) : $(_s) ;\n",
+"if $(RANLIB) { Ranlib $(_l) ; }\n",
+"if ! ( $(NOARSCAN) || $(KEEPOBJS) ) { RmTemps $(_l) : $(_s) ; }\n",
+"}\n",
+"rule Link\n",
+"{\n",
+"MODE on $(<) = $(EXEMODE) ;\n",
+"Chmod $(<) ;\n",
+"}\n",
+"rule LinkLibraries\n",
+"{\n",
+"local _t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;\n",
+"DEPENDS $(_t) : $(>:S=$(SUFLIB)) ;\n",
+"NEEDLIBS on $(_t) += $(>:S=$(SUFLIB)) ;\n",
+"}\n",
+"rule Main\n",
+"{\n",
+"MainFromObjects $(<) : $(>:S=$(SUFOBJ)) ;\n",
+"Objects $(>) ;\n",
+"}\n",
+"rule MainFromObjects\n",
+"{\n",
+"local _s _t ;\n",
+"_s = [ FGristFiles $(>) ] ;\n",
+"_t = [ FAppendSuffix $(<) : $(SUFEXE) ] ;\n",
+"if $(_t) != $(<)\n",
+"{\n",
+"DEPENDS $(<) : $(_t) ;\n",
+"NOTFILE $(<) ;\n",
+"}\n",
+"DEPENDS exe : $(_t) ;\n",
+"DEPENDS $(_t) : $(_s) ;\n",
+"MakeLocate $(_t) : $(LOCATE_TARGET) ;\n",
+"Clean clean : $(_t) ;\n",
+"Link $(_t) : $(_s) ;\n",
+"}\n",
+"rule MakeLocate\n",
+"{\n",
+"if $(>)\n",
+"{\n",
+"LOCATE on $(<) = $(>) ;\n",
+"Depends $(<) : $(>[1]) ;\n",
+"MkDir $(>[1]) ;\n",
+"}\n",
+"}\n",
+"rule MkDir\n",
+"{\n",
+"NOUPDATE $(<) ;\n",
+"if $(<) != $(DOT) && ! $($(<)-mkdir)\n",
+"{\n",
+"local s ;\n",
+"$(<)-mkdir = true ;\n",
+"MkDir1 $(<) ;\n",
+"Depends dirs : $(<) ;\n",
+"s = $(<:P) ;\n",
+"if $(NT)\n",
+"{\n",
+"switch $(s)\n",
+"{\n",
+"case *: : s = ;\n",
+"case *:\\\\ : s = ;\n",
+"}\n",
+"}\n",
+"if $(s) && $(s) != $(<)\n",
+"{\n",
+"Depends $(<) : $(s) ;\n",
+"MkDir $(s) ;\n",
+"}\n",
+"else if $(s)\n",
+"{\n",
+"NOTFILE $(s) ;\n",
+"}\n",
+"}\n",
+"}\n",
+"rule Object\n",
+"{\n",
+"local h ;\n",
+"Clean clean : $(<) ;\n",
+"MakeLocate $(<) : $(LOCATE_TARGET) ;\n",
+"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
+"HDRS on $(<) = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n",
+"if $(SEARCH_SOURCE)\n",
+"{\n",
+"h = $(SEARCH_SOURCE) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"h = \"\" ;\n",
+"}\n",
+"HDRRULE on $(>) = HdrRule ;\n",
+"HDRSCAN on $(>) = $(HDRPATTERN) ;\n",
+"HDRSEARCH on $(>) = $(HDRS) $(SUBDIRHDRS) $(h) $(STDHDRS) ;\n",
+"HDRGRIST on $(>) = $(HDRGRIST) ;\n",
+"switch $(>:S)\n",
+"{\n",
+"case .asm : As $(<) : $(>) ;\n",
+"case .c : Cc $(<) : $(>) ;\n",
+"case .C : C++ $(<) : $(>) ;\n",
+"case .cc : C++ $(<) : $(>) ;\n",
+"case .cpp : C++ $(<) : $(>) ;\n",
+"case .f : Fortran $(<) : $(>) ;\n",
+"case .l : Cc $(<) : $(<:S=.c) ;\n",
+"Lex $(<:S=.c) : $(>) ;\n",
+"case .s : As $(<) : $(>) ;\n",
+"case .y : Cc $(<) : $(<:S=.c) ;\n",
+"Yacc $(<:S=.c) : $(>) ;\n",
+"case * : UserObject $(<) : $(>) ;\n",
+"}\n",
+"}\n",
+"rule ObjectCcFlags\n",
+"{\n",
+"CCFLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;\n",
+"}\n",
+"rule ObjectC++Flags\n",
+"{\n",
+"C++FLAGS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;\n",
+"}\n",
+"rule ObjectHdrs\n",
+"{\n",
+"HDRS on [ FGristFiles $(<:S=$(SUFOBJ)) ] += $(>) ;\n",
+"}\n",
+"rule Objects\n",
+"{\n",
+"local _i ;\n",
+"for _i in [ FGristFiles $(<) ]\n",
+"{\n",
+"Object $(_i:S=$(SUFOBJ)) : $(_i) ;\n",
+"DEPENDS obj : $(_i:S=$(SUFOBJ)) ;\n",
+"}\n",
+"}\n",
+"rule RmTemps\n",
+"{\n",
+"TEMPORARY $(>) ;\n",
+"}\n",
+"rule Setuid\n",
+"{\n",
+"MODE on [ FAppendSuffix $(<) : $(SUFEXE) ] = 4711 ;\n",
+"}\n",
+"rule Shell\n",
+"{\n",
+"DEPENDS shell : $(<) ;\n",
+"DEPENDS $(<) : $(>) ;\n",
+"SEARCH on $(>) = $(SEARCH_SOURCE) ;\n",
+"MODE on $(<) = $(SHELLMODE) ;\n",
+"Clean clean : $(<) ;\n",
+"Chmod $(<) ;\n",
+"}\n",
+"rule SubDir\n",
+"{\n",
+"local _r _s ;\n",
+"if ! $($(<[1]))\n",
+"{\n",
+"if ! $(<[1])\n",
+"{\n",
+"EXIT SubDir syntax error ;\n",
+"}\n",
+"$(<[1]) = [ FSubDir $(<[2-]) ] ;\n",
+"}\n",
+"if ! $($(<[1])-included)\n",
+"{\n",
+"$(<[1])-included = TRUE ;\n",
+"_r = $($(<[1])RULES) ;\n",
+"if ! $(_r)\n",
+"{\n",
+"_r = $(JAMRULES:R=$($(<[1]))) ;\n",
+"}\n",
+"include $(_r) ;\n",
+"}\n",
+"_s = [ FDirName $(<[2-]) ] ;\n",
+"SUBDIR = $(_s:R=$($(<[1]))) ;\n",
+"SUBDIR_TOKENS = $(<[2-]) ;\n",
+"SEARCH_SOURCE = $(SUBDIR) ;\n",
+"LOCATE_SOURCE = $(ALL_LOCATE_TARGET) $(SUBDIR) ;\n",
+"LOCATE_TARGET = $(ALL_LOCATE_TARGET) $(SUBDIR) ;\n",
+"SOURCE_GRIST = [ FGrist $(<[2-]) ] ;\n",
+"SUBDIRCCFLAGS = ;\n",
+"SUBDIRC++FLAGS = ;\n",
+"SUBDIRHDRS = ;\n",
+"}\n",
+"rule SubDirCcFlags\n",
+"{\n",
+"SUBDIRCCFLAGS += $(<) ;\n",
+"}\n",
+"rule SubDirC++Flags\n",
+"{\n",
+"SUBDIRC++FLAGS += $(<) ;\n",
+"}\n",
+"rule SubDirHdrs\n",
+"{\n",
+"SUBDIRHDRS += $(<) ;\n",
+"}\n",
+"rule SubInclude\n",
+"{\n",
+"local _s ;\n",
+"if ! $($(<[1]))\n",
+"{\n",
+"EXIT Top level of source tree has not been set with $(<[1]) ;\n",
+"}\n",
+"_s = [ FDirName $(<[2-]) ] ;\n",
+"include $(JAMFILE:D=$(_s):R=$($(<[1]))) ;\n",
+"}\n",
+"rule Undefines\n",
+"{\n",
+"UNDEFS on [ FAppendSuffix $(<) : $(SUFEXE) ] += $(UNDEFFLAG)$(>) ;\n",
+"}\n",
+"rule UserObject\n",
+"{\n",
+"EXIT \"Unknown suffix on\" $(>) \"- see UserObject rule in Jamfile(5).\" ;\n",
+"}\n",
+"rule Yacc\n",
+"{\n",
+"local _h ;\n",
+"_h = $(<:BS=.h) ;\n",
+"MakeLocate $(<) $(_h) : $(LOCATE_SOURCE) ;\n",
+"if $(YACC)\n",
+"{\n",
+"DEPENDS $(<) $(_h) : $(>) ;\n",
+"Yacc1 $(<) $(_h) : $(>) ;\n",
+"YaccMv $(<) $(_h) : $(>) ;\n",
+"Clean clean : $(<) $(_h) ;\n",
+"}\n",
+"INCLUDES $(<) : $(_h) ;\n",
+"}\n",
+"rule FGrist\n",
+"{\n",
+"local _g _i ;\n",
+"_g = $(<[1]) ;\n",
+"for _i in $(<[2-])\n",
+"{\n",
+"_g = $(_g)!$(_i) ;\n",
+"}\n",
+"return $(_g) ;\n",
+"}\n",
+"rule FGristFiles\n",
+"{\n",
+"if ! $(SOURCE_GRIST)\n",
+"{\n",
+"return $(<) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"return $(<:G=$(SOURCE_GRIST)) ;\n",
+"}\n",
+"}\n",
+"rule FGristSourceFiles\n",
+"{\n",
+"if ! $(SOURCE_GRIST)\n",
+"{\n",
+"return $(<) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"local _i _o ;\n",
+"for _i in $(<)\n",
+"{\n",
+"switch $(_i)\n",
+"{\n",
+"case *.h : _o += $(_i) ;\n",
+"case * : _o += $(_i:G=$(SOURCE_GRIST)) ;\n",
+"}\n",
+"}\n",
+"return $(_o) ;\n",
+"}\n",
+"}\n",
+"rule FConcat\n",
+"{\n",
+"local _t _r ;\n",
+"$(_r) = $(<[1]) ;\n",
+"for _t in $(<[2-])\n",
+"{\n",
+"$(_r) = $(_r)$(_t) ;\n",
+"}\n",
+"return $(_r) ;\n",
+"}\n",
+"rule FSubDir\n",
+"{\n",
+"local _i _d ;\n",
+"if ! $(<[1])\n",
+"{\n",
+"_d = $(DOT) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"_d = $(DOTDOT) ;\n",
+"for _i in $(<[2-])\n",
+"{\n",
+"_d = $(_d:R=$(DOTDOT)) ;\n",
+"}\n",
+"}\n",
+"return $(_d) ;\n",
+"}\n",
+"rule FDirName\n",
+"{\n",
+"local _s _i ;\n",
+"if ! $(<)\n",
+"{\n",
+"_s = $(DOT) ;\n",
+"}\n",
+"else if $(VMS)\n",
+"{\n",
+"switch $(<[1])\n",
+"{\n",
+"case *:* : _s = $(<[1]) ;\n",
+"case \\\\[*\\\\] : _s = $(<[1]) ;\n",
+"case * : _s = [.$(<[1])] ;\n",
+"}\n",
+"for _i in [.$(<[2-])]\n",
+"{\n",
+"_s = $(_i:R=$(_s)) ;\n",
+"}\n",
+"}\n",
+"else if $(MAC)\n",
+"{\n",
+"_s = $(DOT) ;\n",
+"for _i in $(<)\n",
+"{\n",
+"_s = $(_i:R=$(_s)) ;\n",
+"}\n",
+"}\n",
+"else\n",
+"{\n",
+"_s = $(<[1]) ;\n",
+"for _i in $(<[2-])\n",
+"{\n",
+"_s = $(_i:R=$(_s)) ;\n",
+"}\n",
+"}\n",
+"return $(_s) ;\n",
+"}\n",
+"rule _makeCommon\n",
+"{\n",
+"if $($(<)[1]) && $($(<)[1]) = $($(>)[1])\n",
+"{\n",
+"$(<) = $($(<)[2-]) ;\n",
+"$(>) = $($(>)[2-]) ;\n",
+"_makeCommon $(<) : $(>) ;\n",
+"}\n",
+"}\n",
+"rule FRelPath\n",
+"{\n",
+"local _l _r ;\n",
+"_l = $(<) ;\n",
+"_r = $(>) ;\n",
+"_makeCommon _l : _r ;\n",
+"_l = [ FSubDir $(_l) ] ;\n",
+"_r = [ FDirName $(_r) ] ;\n",
+"if $(_r) = $(DOT) {\n",
+"return $(_l) ;\n",
+"} else {\n",
+"return $(_r:R=$(_l)) ;\n",
+"}\n",
+"}\n",
+"rule FAppendSuffix\n",
+"{\n",
+"if $(>)\n",
+"{\n",
+"local _i _o ;\n",
+"for _i in $(<)\n",
+"{\n",
+"if $(_i:S)\n",
+"{\n",
+"_o += $(_i) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"_o += $(_i:S=$(>)) ;\n",
+"}\n",
+"}\n",
+"return $(_o) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"return $(<) ;\n",
+"}\n",
+"}\n",
+"rule unmakeDir\n",
+"{\n",
+"if $(>[1]:D) && $(>[1]:D) != $(>[1]) && $(>[1]:D) != \\\\\\\\\n",
+"{\n",
+"unmakeDir $(<) : $(>[1]:D) $(>[1]:BS) $(>[2-]) ;\n",
+"}\n",
+"else\n",
+"{\n",
+"$(<) = $(>) ;\n",
+"}\n",
+"}\n",
+"rule FConvertToSlashes\n",
+"{\n",
+"local _d, _s, _i ;\n",
+"unmakeDir _d : $(<) ;\n",
+"_s = $(_d[1]) ;\n",
+"for _i in $(_d[2-])\n",
+"{\n",
+"_s = $(_s)/$(_i) ;\n",
+"}\n",
+"return $(_s) ;\n",
+"}\n",
+"actions updated together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) $(>)\n",
+"}\n",
+"actions As\n",
+"{\n",
+"$(AS) $(ASFLAGS) -I$(HDRS) -o $(<) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o $(<) $(>)\n",
+"}\n",
+"actions Chgrp\n",
+"{\n",
+"$(CHGRP) $(GROUP) $(<)\n",
+"}\n",
+"actions Chmod1\n",
+"{\n",
+"$(CHMOD) $(MODE) $(<)\n",
+"}\n",
+"actions Chown\n",
+"{\n",
+"$(CHOWN) $(OWNER) $(<)\n",
+"}\n",
+"actions piecemeal together existing Clean\n",
+"{\n",
+"$(RM) $(>)\n",
+"}\n",
+"actions File\n",
+"{\n",
+"$(CP) $(>) $(<)\n",
+"}\n",
+"actions GenFile1\n",
+"{\n",
+"$(>[1]) $(<) $(>[2-])\n",
+"}\n",
+"actions Fortran\n",
+"{\n",
+"$(FORTRAN) $(FORTRANFLAGS) -o $(<) $(>)\n",
+"}\n",
+"actions HardLink\n",
+"{\n",
+"$(RM) $(<) && $(LN) $(>) $(<)\n",
+"}\n",
+"actions Install\n",
+"{\n",
+"$(CP) $(>) $(<)\n",
+"}\n",
+"actions Lex\n",
+"{\n",
+"$(LEX) $(>)\n",
+"}\n",
+"actions LexMv\n",
+"{\n",
+"$(MV) lex.yy.c $(<)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
+"}\n",
+"actions MkDir1\n",
+"{\n",
+"$(MKDIR) $(<)\n",
+"}\n",
+"actions together Ranlib\n",
+"{\n",
+"$(RANLIB) $(<)\n",
+"}\n",
+"actions quietly updated piecemeal together RmTemps\n",
+"{\n",
+"$(RM) $(>)\n",
+"}\n",
+"actions Shell\n",
+"{\n",
+"$(AWK) '\n",
+"NR == 1 { print \"$(SHELLHEADER)\" }\n",
+"NR == 1 && /^[#:]/ { next }\n",
+"/^##/ { next }\n",
+"{ print }\n",
+"' < $(>) > $(<)\n",
+"}\n",
+"actions Yacc1\n",
+"{\n",
+"$(YACC) $(YACCFLAGS) $(>)\n",
+"}\n",
+"actions YaccMv\n",
+"{\n",
+"$(MV) $(YACCFILES).c $(<[1])\n",
+"$(MV) $(YACCFILES).h $(<[2])\n",
+"}\n",
+"if $(RELOCATE)\n",
+"{\n",
+"actions C++\n",
+"{\n",
+"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) $(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) $(>)\n",
+"}\n",
+"actions ignore CcMv\n",
+"{\n",
+"[ $(<) != $(>:BS=$(SUFOBJ)) ] && $(MV) $(>:BS=$(SUFOBJ)) $(<)\n",
+"}\n",
+"}\n",
+"if $(NOARUPDATE)\n",
+"{\n",
+"actions Archive\n",
+"{\n",
+"$(AR) $(<) $(>)\n",
+"}\n",
+"}\n",
+"if $(NT)\n",
+"{\n",
+"if $(TOOLSET) = VISUALC || $(TOOLSET) = VC7 || $(TOOLSET) = INTELC\n",
+"{\n",
+"actions updated together piecemeal Archive\n",
+"{\n",
+"if exist $(<) set _$(<:B)_=$(<)\n",
+"$(AR) /out:$(<) %_$(<:B)_% $(>)\n",
+"}\n",
+"actions As\n",
+"{\n",
+"$(AS) /Ml /p /v /w2 $(>) $(<) ,nul,nul;\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /I$(STDHDRS) /Tp$(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
+"}\n",
+"}\n",
+"else if $(TOOLSET) = VISUALC16\n",
+"{\n",
+"actions updated together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) -+$(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) /c $(CCFLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) /c $(C++FLAGS) $(OPTIM) /Fo$(<) /I$(HDRS) /Tp$(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) $(LINKFLAGS) /out:$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
+"}\n",
+"}\n",
+"else if $(TOOLSET) = BORLANDC\n",
+"{\n",
+"actions updated together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) -+$(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) -e$(<) $(LINKFLAGS) $(UNDEFS) -L$(LINKLIBS) $(NEEDLIBS) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
+"}\n",
+"}\n",
+"else if $(TOOLSET) = MINGW\n",
+"{\n",
+"actions together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) $(>:T)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
+"}\n",
+"}\n",
+"else if $(TOOLSET) = WATCOM\n",
+"{\n",
+"actions together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) +-$(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
+"}\n",
+"actions Shell\n",
+"{\n",
+"$(CP) $(>) $(<)\n",
+"}\n",
+"}\n",
+"else if $(TOOLSET) = LCC\n",
+"{\n",
+"actions together piecemeal Archive\n",
+"{\n",
+"$(AR) /out:$(<) $(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) $(CCFLAGS) $(OPTIM) -Fo$(<) -I$(HDRS) $(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) $(LINKFLAGS) -o $(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
+"}\n",
+"actions Shell\n",
+"{\n",
+"$(CP) $(>) $(<)\n",
+"}\n",
+"}\n",
+"}\n",
+"else if $(OS2)\n",
+"{\n",
+"if $(TOOLSET) = WATCOM\n",
+"{\n",
+"actions together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) +-$(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) $(CCFLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) $(C++FLAGS) $(OPTIM) /Fo=$(<) /I$(HDRS) $(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) $(LINKFLAGS) /Fe=$(<) $(UNDEFS) $(>) $(NEEDLIBS) $(LINKLIBS)\n",
+"}\n",
+"actions Shell\n",
+"{\n",
+"$(CP) $(>) $(<)\n",
+"}\n",
+"}\n",
+"else if $(TOOLSET) = EMX\n",
+"{\n",
+"actions together piecemeal Archive\n",
+"{\n",
+"$(AR) $(<) $(>:T)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC) -c $(CCFLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++) -c $(C++FLAGS) $(OPTIM) -I$(HDRS) -o$(<) $(>)\n",
+"}\n",
+"}\n",
+"}\n",
+"else if $(VMS)\n",
+"{\n",
+"actions updated together piecemeal Archive\n",
+"{\n",
+"lib/replace $(<) $(>[1]) ,$(>[2-])\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"$(CC)/obj=$(<) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"$(C++)/obj=$(<) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>)\n",
+"}\n",
+"actions piecemeal together existing Clean\n",
+"{\n",
+"$(RM) $(>[1]);* ,$(>[2-]);*\n",
+"}\n",
+"actions together quietly CreLib\n",
+"{\n",
+"if f$search(\"$(<)\") .eqs. \"\" then lib/create $(<)\n",
+"}\n",
+"actions GenFile1\n",
+"{\n",
+"mcr $(>[1]) $(<) $(>[2-])\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK)/exe=$(<) $(LINKFLAGS) $(>[1]) ,$(>[2-]) ,$(NEEDLIBS)/lib ,$(LINKLIBS)\n",
+"}\n",
+"actions quietly updated piecemeal together RmTemps\n",
+"{\n",
+"$(RM) $(>[1]);* ,$(>[2-]);*\n",
+"}\n",
+"actions Shell\n",
+"{\n",
+"$(CP) $(>) $(<)\n",
+"}\n",
+"}\n",
+"else if $(MAC)\n",
+"{\n",
+"actions together Archive\n",
+"{\n",
+"$(LINK) -library -o $(<) $(>)\n",
+"}\n",
+"actions Cc\n",
+"{\n",
+"set -e MWCincludes $(MACINC)\n",
+"$(CC) -o $(<) $(CCFLAGS) $(OPTIM) $(>)\n",
+"}\n",
+"actions C++\n",
+"{\n",
+"set -e MWCincludes $(MACINC)\n",
+"$(CC) -o $(<) $(C++FLAGS) $(OPTIM) $(>)\n",
+"}\n",
+"actions Link bind NEEDLIBS\n",
+"{\n",
+"$(LINK) -o $(<) $(LINKFLAGS) $(>) $(NEEDLIBS) \"$(LINKLIBS)\"\n",
+"}\n",
+"}\n",
+"rule BULK { Bulk $(<) : $(>) ; }\n",
+"rule FILE { File $(<) : $(>) ; }\n",
+"rule HDRRULE { HdrRule $(<) : $(>) ; }\n",
+"rule INSTALL { Install $(<) : $(>) ; }\n",
+"rule LIBRARY { Library $(<) : $(>) ; }\n",
+"rule LIBS { LinkLibraries $(<) : $(>) ; }\n",
+"rule LINK { Link $(<) : $(>) ; }\n",
+"rule MAIN { Main $(<) : $(>) ; }\n",
+"rule SETUID { Setuid $(<) ; }\n",
+"rule SHELL { Shell $(<) : $(>) ; }\n",
+"rule UNDEFINES { Undefines $(<) : $(>) ; }\n",
+"rule INSTALLBIN { InstallBin $(BINDIR) : $(<) ; }\n",
+"rule INSTALLLIB { InstallLib $(LIBDIR) : $(<) ; }\n",
+"rule INSTALLMAN { InstallMan $(MANDIR) : $(<) ; }\n",
+"rule addDirName { $(<) += [ FDirName $(>) ] ; }\n",
+"rule makeDirName { $(<) = [ FDirName $(>) ] ; }\n",
+"rule makeGristedName { $(<) = [ FGristSourceFiles $(>) ] ; }\n",
+"rule makeRelPath { $(<[1]) = [ FRelPath $(<[2-]) : $(>) ] ; }\n",
+"rule makeSuffixed { $(<[1]) = [ FAppendSuffix $(>) : $(<[2]) ] ; }\n",
+"{\n",
+"if $(JAMFILE) { include $(JAMFILE) ; }\n",
+"}\n",
+"}\n",
+0 };
diff --git a/jam-files/engine/jambase.h b/jam-files/engine/jambase.h
new file mode 100644
index 000000000..c05ec7922
--- /dev/null
+++ b/jam-files/engine/jambase.h
@@ -0,0 +1,15 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * jambase.h - declaration for the internal jambase
+ *
+ * The file Jambase is turned into a C array of strings in jambase.c
+ * so that it can be built in to the executable. This is the
+ * declaration for that array.
+ */
+
+extern char *jambase[];
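The declaration above pairs with the definition in jambase.c earlier in this patch: the whole Jambase rule file is embedded line by line as an array of C strings, closed with the 0 sentinel visible at the end of that array, so the engine can carry its base rules inside the executable instead of locating a separate Jambase file on disk. A minimal, self-contained sketch of that shape and of how a consumer might walk it (the example_jambase array and main() below are illustrative only, not code from the patch; the placeholder entries are copied from rule lines shown above):

#include <stdio.h>

/* Stand-in for the real jambase[]: an array of C strings, one source line
 * per entry, terminated by a 0 sentinel just like the "0 };" line above. */
static char *example_jambase[] = {
    "rule MAIN { Main $(<) : $(>) ; }\n",
    "rule LIBRARY { Library $(<) : $(>) ; }\n",
    "if $(JAMFILE) { include $(JAMFILE) ; }\n",
    0
};

int main(void)
{
    /* Walk the array until the terminating 0, the same way a consumer of
     * the extern char *jambase[] declaration would. */
    for (char **line = example_jambase; *line != 0; ++line)
        fputs(*line, stdout);
    return 0;
}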
diff --git a/jam-files/engine/jamgram.c b/jam-files/engine/jamgram.c
new file mode 100644
index 000000000..b1fa0835d
--- /dev/null
+++ b/jam-files/engine/jamgram.c
@@ -0,0 +1,1830 @@
+/* A Bison parser, made by GNU Bison 1.875. */
+
+/* Skeleton parser for Yacc-like parsing with Bison,
+ Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2, or (at your option)
+ any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA. */
+
+/* As a special exception, when this file is copied by Bison into a
+ Bison output file, you may use that output file without restriction.
+ This special exception was added by the Free Software Foundation
+ in version 1.24 of Bison. */
+
+/* Written by Richard Stallman by simplifying the original so called
+ ``semantic'' parser. */
+
+/* All symbols defined below should begin with yy or YY, to avoid
+ infringing on user name space. This should be done even for local
+ variables, as they might otherwise be expanded by user macros.
+ There are some unavoidable exceptions within include files to
+ define necessary library symbols; they are noted "INFRINGES ON
+ USER NAME SPACE" below. */
+
+/* Identify Bison output. */
+#define YYBISON 1
+
+/* Skeleton name. */
+#define YYSKELETON_NAME "yacc.c"
+
+/* Pure parsers. */
+#define YYPURE 0
+
+/* Using locations. */
+#define YYLSP_NEEDED 0
+
+
+
+/* Tokens. */
+#ifndef YYTOKENTYPE
+# define YYTOKENTYPE
+ /* Put the tokens into the symbol table, so that GDB and other debuggers
+ know about them. */
+ enum yytokentype {
+ _BANG_t = 258,
+ _BANG_EQUALS_t = 259,
+ _AMPER_t = 260,
+ _AMPERAMPER_t = 261,
+ _LPAREN_t = 262,
+ _RPAREN_t = 263,
+ _PLUS_EQUALS_t = 264,
+ _COLON_t = 265,
+ _SEMIC_t = 266,
+ _LANGLE_t = 267,
+ _LANGLE_EQUALS_t = 268,
+ _EQUALS_t = 269,
+ _RANGLE_t = 270,
+ _RANGLE_EQUALS_t = 271,
+ _QUESTION_EQUALS_t = 272,
+ _LBRACKET_t = 273,
+ _RBRACKET_t = 274,
+ ACTIONS_t = 275,
+ BIND_t = 276,
+ CASE_t = 277,
+ CLASS_t = 278,
+ DEFAULT_t = 279,
+ ELSE_t = 280,
+ EXISTING_t = 281,
+ FOR_t = 282,
+ IF_t = 283,
+ IGNORE_t = 284,
+ IN_t = 285,
+ INCLUDE_t = 286,
+ LOCAL_t = 287,
+ MODULE_t = 288,
+ ON_t = 289,
+ PIECEMEAL_t = 290,
+ QUIETLY_t = 291,
+ RETURN_t = 292,
+ RULE_t = 293,
+ SWITCH_t = 294,
+ TOGETHER_t = 295,
+ UPDATED_t = 296,
+ WHILE_t = 297,
+ _LBRACE_t = 298,
+ _BAR_t = 299,
+ _BARBAR_t = 300,
+ _RBRACE_t = 301,
+ ARG = 302,
+ STRING = 303
+ };
+#endif
+#define _BANG_t 258
+#define _BANG_EQUALS_t 259
+#define _AMPER_t 260
+#define _AMPERAMPER_t 261
+#define _LPAREN_t 262
+#define _RPAREN_t 263
+#define _PLUS_EQUALS_t 264
+#define _COLON_t 265
+#define _SEMIC_t 266
+#define _LANGLE_t 267
+#define _LANGLE_EQUALS_t 268
+#define _EQUALS_t 269
+#define _RANGLE_t 270
+#define _RANGLE_EQUALS_t 271
+#define _QUESTION_EQUALS_t 272
+#define _LBRACKET_t 273
+#define _RBRACKET_t 274
+#define ACTIONS_t 275
+#define BIND_t 276
+#define CASE_t 277
+#define CLASS_t 278
+#define DEFAULT_t 279
+#define ELSE_t 280
+#define EXISTING_t 281
+#define FOR_t 282
+#define IF_t 283
+#define IGNORE_t 284
+#define IN_t 285
+#define INCLUDE_t 286
+#define LOCAL_t 287
+#define MODULE_t 288
+#define ON_t 289
+#define PIECEMEAL_t 290
+#define QUIETLY_t 291
+#define RETURN_t 292
+#define RULE_t 293
+#define SWITCH_t 294
+#define TOGETHER_t 295
+#define UPDATED_t 296
+#define WHILE_t 297
+#define _LBRACE_t 298
+#define _BAR_t 299
+#define _BARBAR_t 300
+#define _RBRACE_t 301
+#define ARG 302
+#define STRING 303
+
+
+
+
+/* Copy the first part of user declarations. */
+#line 96 "jamgram.y"
+
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "compile.h"
+#include "newstr.h"
+#include "rules.h"
+
+# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
+
+# define F0 (LIST *(*)(PARSE *, FRAME *))0
+# define P0 (PARSE *)0
+# define S0 (char *)0
+
+# define pappend( l,r ) parse_make( compile_append,l,r,P0,S0,S0,0 )
+# define peval( c,l,r ) parse_make( compile_eval,l,r,P0,S0,S0,c )
+# define pfor( s,l,r,x ) parse_make( compile_foreach,l,r,P0,s,S0,x )
+# define pif( l,r,t ) parse_make( compile_if,l,r,t,S0,S0,0 )
+# define pincl( l ) parse_make( compile_include,l,P0,P0,S0,S0,0 )
+# define plist( s ) parse_make( compile_list,P0,P0,P0,s,S0,0 )
+# define plocal( l,r,t ) parse_make( compile_local,l,r,t,S0,S0,0 )
+# define pmodule( l,r ) parse_make( compile_module,l,r,P0,S0,S0,0 )
+# define pclass( l,r ) parse_make( compile_class,l,r,P0,S0,S0,0 )
+# define pnull() parse_make( compile_null,P0,P0,P0,S0,S0,0 )
+# define pon( l,r ) parse_make( compile_on,l,r,P0,S0,S0,0 )
+# define prule( s,p ) parse_make( compile_rule,p,P0,P0,s,S0,0 )
+# define prules( l,r ) parse_make( compile_rules,l,r,P0,S0,S0,0 )
+# define pset( l,r,a ) parse_make( compile_set,l,r,P0,S0,S0,a )
+# define pset1( l,r,t,a ) parse_make( compile_settings,l,r,t,S0,S0,a )
+# define psetc( s,p,a,l ) parse_make( compile_setcomp,p,a,P0,s,S0,l )
+# define psete( s,l,s1,f ) parse_make( compile_setexec,l,P0,P0,s,s1,f )
+# define pswitch( l,r ) parse_make( compile_switch,l,r,P0,S0,S0,0 )
+# define pwhile( l,r ) parse_make( compile_while,l,r,P0,S0,S0,0 )
+
+# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
+# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
+
+
+
+/* Enabling traces. */
+#ifndef YYDEBUG
+# define YYDEBUG 0
+#endif
+
+/* Enabling verbose error messages. */
+#ifdef YYERROR_VERBOSE
+# undef YYERROR_VERBOSE
+# define YYERROR_VERBOSE 1
+#else
+# define YYERROR_VERBOSE 0
+#endif
+
+#if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED)
+typedef int YYSTYPE;
+# define yystype YYSTYPE /* obsolescent; will be withdrawn */
+# define YYSTYPE_IS_DECLARED 1
+# define YYSTYPE_IS_TRIVIAL 1
+#endif
+
+
+
+/* Copy the second part of user declarations. */
+
+
+/* Line 214 of yacc.c. */
+#line 223 "y.tab.c"
+
+#if ! defined (yyoverflow) || YYERROR_VERBOSE
+
+/* The parser invokes alloca or malloc; define the necessary symbols. */
+
+# if YYSTACK_USE_ALLOCA
+# define YYSTACK_ALLOC alloca
+# else
+# ifndef YYSTACK_USE_ALLOCA
+# if defined (alloca) || defined (_ALLOCA_H)
+# define YYSTACK_ALLOC alloca
+# else
+# ifdef __GNUC__
+# define YYSTACK_ALLOC __builtin_alloca
+# endif
+# endif
+# endif
+# endif
+
+# ifdef YYSTACK_ALLOC
+ /* Pacify GCC's `empty if-body' warning. */
+# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0)
+# else
+# if defined (__STDC__) || defined (__cplusplus)
+# include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
+# define YYSIZE_T size_t
+# endif
+# define YYSTACK_ALLOC malloc
+# define YYSTACK_FREE free
+# endif
+#endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */
+
+
+#if (! defined (yyoverflow) \
+ && (! defined (__cplusplus) \
+ || (YYSTYPE_IS_TRIVIAL)))
+
+/* A type that is properly aligned for any stack member. */
+union yyalloc
+{
+ short yyss;
+ YYSTYPE yyvs;
+ };
+
+/* The size of the maximum gap between one aligned stack and the next. */
+# define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1)
+
+/* The size of an array large enough to hold all stacks, each with
+ N elements. */
+# define YYSTACK_BYTES(N) \
+ ((N) * (sizeof (short) + sizeof (YYSTYPE)) \
+ + YYSTACK_GAP_MAXIMUM)
+
+/* Copy COUNT objects from FROM to TO. The source and destination do
+ not overlap. */
+# ifndef YYCOPY
+# if 1 < __GNUC__
+# define YYCOPY(To, From, Count) \
+ __builtin_memcpy (To, From, (Count) * sizeof (*(From)))
+# else
+# define YYCOPY(To, From, Count) \
+ do \
+ { \
+ register YYSIZE_T yyi; \
+ for (yyi = 0; yyi < (Count); yyi++) \
+ (To)[yyi] = (From)[yyi]; \
+ } \
+ while (0)
+# endif
+# endif
+
+/* Relocate STACK from its old location to the new one. The
+ local variables YYSIZE and YYSTACKSIZE give the old and new number of
+ elements in the stack, and YYPTR gives the new location of the
+ stack. Advance YYPTR to a properly aligned location for the next
+ stack. */
+# define YYSTACK_RELOCATE(Stack) \
+ do \
+ { \
+ YYSIZE_T yynewbytes; \
+ YYCOPY (&yyptr->Stack, Stack, yysize); \
+ Stack = &yyptr->Stack; \
+ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \
+ yyptr += yynewbytes / sizeof (*yyptr); \
+ } \
+ while (0)
+
+#endif
+
+#if defined (__STDC__) || defined (__cplusplus)
+ typedef signed char yysigned_char;
+#else
+ typedef short yysigned_char;
+#endif
+
+/* YYFINAL -- State number of the termination state. */
+#define YYFINAL 43
+/* YYLAST -- Last index in YYTABLE. */
+#define YYLAST 261
+
+/* YYNTOKENS -- Number of terminals. */
+#define YYNTOKENS 49
+/* YYNNTS -- Number of nonterminals. */
+#define YYNNTS 24
+/* YYNRULES -- Number of rules. */
+#define YYNRULES 75
+/* YYNRULES -- Number of states. */
+#define YYNSTATES 159
+
+/* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */
+#define YYUNDEFTOK 2
+#define YYMAXUTOK 303
+
+#define YYTRANSLATE(YYX) \
+ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK)
+
+/* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */
+static const unsigned char yytranslate[] =
+{
+ 0, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 1, 2, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48
+};
+
+#if YYDEBUG
+/* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in
+ YYRHS. */
+static const unsigned char yyprhs[] =
+{
+ 0, 0, 3, 4, 6, 8, 10, 12, 15, 21,
+ 22, 25, 27, 31, 32, 34, 35, 39, 43, 47,
+ 52, 59, 63, 72, 78, 84, 90, 96, 102, 110,
+ 116, 120, 121, 122, 132, 134, 136, 138, 141, 143,
+ 147, 151, 155, 159, 163, 167, 171, 175, 179, 183,
+ 187, 190, 194, 195, 198, 203, 205, 209, 211, 212,
+ 215, 217, 218, 223, 226, 231, 236, 237, 240, 242,
+ 244, 246, 248, 250, 252, 253
+};
+
+/* YYRHS -- A `-1'-separated list of the rules' RHS. */
+static const yysigned_char yyrhs[] =
+{
+ 50, 0, -1, -1, 52, -1, 53, -1, 52, -1,
+ 57, -1, 57, 52, -1, 32, 65, 54, 11, 51,
+ -1, -1, 14, 65, -1, 53, -1, 7, 64, 8,
+ -1, -1, 32, -1, -1, 43, 51, 46, -1, 31,
+ 65, 11, -1, 47, 64, 11, -1, 67, 60, 65,
+ 11, -1, 67, 34, 65, 60, 65, 11, -1, 37,
+ 65, 11, -1, 27, 56, 47, 30, 65, 43, 51,
+ 46, -1, 39, 65, 43, 62, 46, -1, 28, 61,
+ 43, 51, 46, -1, 33, 65, 43, 51, 46, -1,
+ 23, 64, 43, 51, 46, -1, 42, 61, 43, 51,
+ 46, -1, 28, 61, 43, 51, 46, 25, 57, -1,
+ 56, 38, 47, 55, 57, -1, 34, 67, 57, -1,
+ -1, -1, 20, 70, 47, 72, 43, 58, 48, 59,
+ 46, -1, 14, -1, 9, -1, 17, -1, 24, 14,
+ -1, 67, -1, 61, 14, 61, -1, 61, 4, 61,
+ -1, 61, 12, 61, -1, 61, 13, 61, -1, 61,
+ 15, 61, -1, 61, 16, 61, -1, 61, 5, 61,
+ -1, 61, 6, 61, -1, 61, 44, 61, -1, 61,
+ 45, 61, -1, 67, 30, 65, -1, 3, 61, -1,
+ 7, 61, 8, -1, -1, 63, 62, -1, 22, 47,
+ 10, 51, -1, 65, -1, 65, 10, 64, -1, 66,
+ -1, -1, 66, 67, -1, 47, -1, -1, 18, 68,
+ 69, 19, -1, 67, 64, -1, 34, 67, 67, 64,
+ -1, 34, 67, 37, 65, -1, -1, 70, 71, -1,
+ 41, -1, 40, -1, 29, -1, 36, -1, 35, -1,
+ 26, -1, -1, 21, 65, -1
+};
+
+/* YYRLINE[YYN] -- source line where rule number YYN was defined. */
+static const unsigned short yyrline[] =
+{
+ 0, 139, 139, 141, 152, 154, 158, 160, 162, 167,
+ 170, 172, 176, 179, 182, 185, 188, 190, 192, 194,
+ 196, 198, 200, 202, 204, 206, 208, 210, 212, 214,
+ 216, 219, 221, 218, 230, 232, 234, 236, 243, 245,
+ 247, 249, 251, 253, 255, 257, 259, 261, 263, 265,
+ 267, 269, 281, 282, 286, 295, 297, 307, 312, 313,
+ 317, 319, 319, 328, 330, 332, 343, 344, 348, 350,
+ 352, 354, 356, 358, 368, 369
+};
+#endif
+
+#if YYDEBUG || YYERROR_VERBOSE
+/* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM.
+ First, the terminals, then, starting at YYNTOKENS, nonterminals. */
+static const char *const yytname[] =
+{
+ "$end", "error", "$undefined", "_BANG_t", "_BANG_EQUALS_t", "_AMPER_t",
+ "_AMPERAMPER_t", "_LPAREN_t", "_RPAREN_t", "_PLUS_EQUALS_t", "_COLON_t",
+ "_SEMIC_t", "_LANGLE_t", "_LANGLE_EQUALS_t", "_EQUALS_t", "_RANGLE_t",
+ "_RANGLE_EQUALS_t", "_QUESTION_EQUALS_t", "_LBRACKET_t", "_RBRACKET_t",
+ "ACTIONS_t", "BIND_t", "CASE_t", "CLASS_t", "DEFAULT_t", "ELSE_t",
+ "EXISTING_t", "FOR_t", "IF_t", "IGNORE_t", "IN_t", "INCLUDE_t",
+ "LOCAL_t", "MODULE_t", "ON_t", "PIECEMEAL_t", "QUIETLY_t", "RETURN_t",
+ "RULE_t", "SWITCH_t", "TOGETHER_t", "UPDATED_t", "WHILE_t", "_LBRACE_t",
+ "_BAR_t", "_BARBAR_t", "_RBRACE_t", "ARG", "STRING", "$accept", "run",
+ "block", "rules", "null", "assign_list_opt", "arglist_opt", "local_opt",
+ "rule", "@1", "@2", "assign", "expr", "cases", "case", "lol", "list",
+ "listp", "arg", "@3", "func", "eflags", "eflag", "bindlist", 0
+};
+#endif
+
+# ifdef YYPRINT
+/* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to
+ token YYLEX-NUM. */
+static const unsigned short yytoknum[] =
+{
+ 0, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303
+};
+# endif
+
+/* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */
+static const unsigned char yyr1[] =
+{
+ 0, 49, 50, 50, 51, 51, 52, 52, 52, 53,
+ 54, 54, 55, 55, 56, 56, 57, 57, 57, 57,
+ 57, 57, 57, 57, 57, 57, 57, 57, 57, 57,
+ 57, 58, 59, 57, 60, 60, 60, 60, 61, 61,
+ 61, 61, 61, 61, 61, 61, 61, 61, 61, 61,
+ 61, 61, 62, 62, 63, 64, 64, 65, 66, 66,
+ 67, 68, 67, 69, 69, 69, 70, 70, 71, 71,
+ 71, 71, 71, 71, 72, 72
+};
+
+/* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */
+static const unsigned char yyr2[] =
+{
+ 0, 2, 0, 1, 1, 1, 1, 2, 5, 0,
+ 2, 1, 3, 0, 1, 0, 3, 3, 3, 4,
+ 6, 3, 8, 5, 5, 5, 5, 5, 7, 5,
+ 3, 0, 0, 9, 1, 1, 1, 2, 1, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 2, 3, 0, 2, 4, 1, 3, 1, 0, 2,
+ 1, 0, 4, 2, 4, 4, 0, 2, 1, 1,
+ 1, 1, 1, 1, 0, 2
+};
+
+/* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state
+ STATE-NUM when YYTABLE doesn't specify something else to do. Zero
+ means the default is an error. */
+static const unsigned char yydefact[] =
+{
+ 2, 61, 66, 58, 15, 0, 58, 58, 58, 0,
+ 58, 58, 0, 9, 60, 0, 3, 0, 6, 0,
+ 0, 0, 0, 55, 57, 14, 0, 0, 0, 60,
+ 0, 38, 0, 9, 0, 15, 0, 0, 0, 0,
+ 5, 4, 0, 1, 0, 7, 35, 34, 36, 0,
+ 58, 58, 0, 58, 0, 73, 70, 72, 71, 69,
+ 68, 74, 67, 9, 58, 59, 0, 50, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 9, 0, 0,
+ 58, 17, 58, 11, 0, 9, 30, 21, 52, 9,
+ 16, 18, 13, 37, 0, 0, 0, 63, 62, 58,
+ 0, 0, 56, 58, 51, 40, 45, 46, 41, 42,
+ 39, 43, 44, 0, 47, 48, 49, 10, 9, 0,
+ 0, 0, 52, 0, 58, 15, 58, 19, 58, 58,
+ 75, 31, 26, 0, 24, 8, 25, 0, 23, 53,
+ 27, 0, 29, 0, 65, 64, 0, 9, 15, 9,
+ 12, 20, 32, 0, 28, 54, 0, 22, 33
+};
+
+/* YYDEFGOTO[NTERM-NUM]. */
+static const short yydefgoto[] =
+{
+ -1, 15, 39, 40, 41, 84, 125, 17, 18, 146,
+ 156, 51, 30, 121, 122, 22, 23, 24, 31, 20,
+ 54, 21, 62, 100
+};
+
+/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing
+ STATE-NUM. */
+#define YYPACT_NINF -48
+static const short yypact[] =
+{
+ 179, -48, -48, -48, -15, 7, -48, -16, -48, 3,
+ -48, -48, 7, 179, 1, 27, -48, -9, 179, 19,
+ -3, 33, -11, 24, 3, -48, -10, 7, 7, -48,
+ 138, 9, 30, 35, 13, 205, 53, 22, 151, 20,
+ -48, -48, 56, -48, 23, -48, -48, -48, -48, 61,
+ -48, -48, 3, -48, 62, -48, -48, -48, -48, -48,
+ -48, 58, -48, 179, -48, -48, 52, -48, 164, 7,
+ 7, 7, 7, 7, 7, 7, 7, 179, 7, 7,
+ -48, -48, -48, -48, 72, 179, -48, -48, 68, 179,
+ -48, -48, 85, -48, 77, 73, 8, -48, -48, -48,
+ 50, 57, -48, -48, -48, 45, 93, 93, -48, -48,
+ 45, -48, -48, 64, 245, 245, -48, -48, 179, 66,
+ 67, 69, 68, 71, -48, 205, -48, -48, -48, -48,
+ -48, -48, -48, 70, 79, -48, -48, 109, -48, -48,
+ -48, 112, -48, 115, -48, -48, 75, 179, 205, 179,
+ -48, -48, -48, 81, -48, -48, 82, -48, -48
+};
+
+/* YYPGOTO[NTERM-NUM]. */
+static const short yypgoto[] =
+{
+ -48, -48, -47, 5, 104, -48, -48, 136, -27, -48,
+ -48, 47, 60, 36, -48, -13, -4, -48, 0, -48,
+ -48, -48, -48, -48
+};
+
+/* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If
+ positive, shift that token. If negative, reduce the rule which
+ number is the opposite. If zero, do what YYDEFACT says.
+ If YYTABLE_NINF, syntax error. */
+#define YYTABLE_NINF -59
+static const short yytable[] =
+{
+ 19, 42, 32, 33, 34, 16, 36, 37, 86, 35,
+ 27, -58, -58, 19, 28, 1, 101, 25, 19, -58,
+ 53, 1, -14, 45, 65, 1, 1, 43, 46, 44,
+ 113, 52, 63, 47, 64, 19, 48, 66, 119, 80,
+ 97, 81, 123, 49, 29, 128, 94, 95, -58, 82,
+ 29, 102, 96, 50, 29, 29, 85, 72, 73, 55,
+ 75, 76, 56, 19, 87, 88, 90, 91, 57, 58,
+ 92, 135, 38, 59, 60, 93, 116, 19, 117, 99,
+ 61, 98, 103, 118, 127, 19, 46, 67, 68, 19,
+ 120, 47, 124, 131, 48, 130, 129, 69, 142, 133,
+ 153, 49, 155, 132, 148, 72, 73, 74, 75, 76,
+ 134, 141, 136, 147, 137, 138, 145, 140, 19, 149,
+ 150, 154, 143, 152, 144, 19, 151, 157, 158, 105,
+ 106, 107, 108, 109, 110, 111, 112, 83, 114, 115,
+ 26, 126, 69, 70, 71, 0, 0, 19, 19, 19,
+ 72, 73, 74, 75, 76, 69, 70, 71, 139, 0,
+ 0, 0, 0, 72, 73, 74, 75, 76, 69, 70,
+ 71, 0, 104, 0, 0, 0, 72, 73, 74, 75,
+ 76, 77, 78, 79, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 89, 78, 79, 1, 0, 2,
+ 0, 0, 3, 0, 0, 0, 4, 5, 78, 79,
+ 6, 7, 8, 9, 0, 0, 10, -15, 11, 0,
+ 0, 12, 13, 1, 0, 2, 14, 0, 3, 0,
+ 0, 0, 4, 5, 0, 0, 6, 25, 8, 9,
+ 0, 0, 10, 0, 11, 0, 0, 12, 13, 69,
+ 70, 71, 14, 0, 0, 0, 0, 72, 73, 74,
+ 75, 76
+};
+
+static const short yycheck[] =
+{
+ 0, 14, 6, 7, 8, 0, 10, 11, 35, 9,
+ 3, 10, 11, 13, 7, 18, 63, 32, 18, 18,
+ 20, 18, 38, 18, 24, 18, 18, 0, 9, 38,
+ 77, 34, 43, 14, 10, 35, 17, 47, 85, 30,
+ 53, 11, 89, 24, 47, 37, 50, 51, 47, 14,
+ 47, 64, 52, 34, 47, 47, 43, 12, 13, 26,
+ 15, 16, 29, 63, 11, 43, 46, 11, 35, 36,
+ 47, 118, 12, 40, 41, 14, 80, 77, 82, 21,
+ 47, 19, 30, 11, 11, 85, 9, 27, 28, 89,
+ 22, 14, 7, 43, 17, 99, 96, 4, 125, 103,
+ 147, 24, 149, 46, 25, 12, 13, 14, 15, 16,
+ 46, 124, 46, 43, 47, 46, 129, 46, 118, 10,
+ 8, 148, 126, 48, 128, 125, 11, 46, 46, 69,
+ 70, 71, 72, 73, 74, 75, 76, 33, 78, 79,
+ 4, 94, 4, 5, 6, -1, -1, 147, 148, 149,
+ 12, 13, 14, 15, 16, 4, 5, 6, 122, -1,
+ -1, -1, -1, 12, 13, 14, 15, 16, 4, 5,
+ 6, -1, 8, -1, -1, -1, 12, 13, 14, 15,
+ 16, 43, 44, 45, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 43, 44, 45, 18, -1, 20,
+ -1, -1, 23, -1, -1, -1, 27, 28, 44, 45,
+ 31, 32, 33, 34, -1, -1, 37, 38, 39, -1,
+ -1, 42, 43, 18, -1, 20, 47, -1, 23, -1,
+ -1, -1, 27, 28, -1, -1, 31, 32, 33, 34,
+ -1, -1, 37, -1, 39, -1, -1, 42, 43, 4,
+ 5, 6, 47, -1, -1, -1, -1, 12, 13, 14,
+ 15, 16
+};
+
+/* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
+ symbol of state STATE-NUM. */
+static const unsigned char yystos[] =
+{
+ 0, 18, 20, 23, 27, 28, 31, 32, 33, 34,
+ 37, 39, 42, 43, 47, 50, 52, 56, 57, 67,
+ 68, 70, 64, 65, 66, 32, 56, 3, 7, 47,
+ 61, 67, 65, 65, 65, 67, 65, 65, 61, 51,
+ 52, 53, 64, 0, 38, 52, 9, 14, 17, 24,
+ 34, 60, 34, 67, 69, 26, 29, 35, 36, 40,
+ 41, 47, 71, 43, 10, 67, 47, 61, 61, 4,
+ 5, 6, 12, 13, 14, 15, 16, 43, 44, 45,
+ 30, 11, 14, 53, 54, 43, 57, 11, 43, 43,
+ 46, 11, 47, 14, 65, 65, 67, 64, 19, 21,
+ 72, 51, 64, 30, 8, 61, 61, 61, 61, 61,
+ 61, 61, 61, 51, 61, 61, 65, 65, 11, 51,
+ 22, 62, 63, 51, 7, 55, 60, 11, 37, 67,
+ 65, 43, 46, 65, 46, 51, 46, 47, 46, 62,
+ 46, 64, 57, 65, 65, 64, 58, 43, 25, 10,
+ 8, 11, 48, 51, 57, 51, 59, 46, 46
+};
+
+#if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__)
+# define YYSIZE_T __SIZE_TYPE__
+#endif
+#if ! defined (YYSIZE_T) && defined (size_t)
+# define YYSIZE_T size_t
+#endif
+#if ! defined (YYSIZE_T)
+# if defined (__STDC__) || defined (__cplusplus)
+# include <stddef.h> /* INFRINGES ON USER NAME SPACE */
+# define YYSIZE_T size_t
+# endif
+#endif
+#if ! defined (YYSIZE_T)
+# define YYSIZE_T unsigned int
+#endif
+
+#define yyerrok (yyerrstatus = 0)
+#define yyclearin (yychar = YYEMPTY)
+#define YYEMPTY (-2)
+#define YYEOF 0
+
+#define YYACCEPT goto yyacceptlab
+#define YYABORT goto yyabortlab
+#define YYERROR goto yyerrlab1
+
+/* Like YYERROR except do call yyerror. This remains here temporarily
+ to ease the transition to the new meaning of YYERROR, for GCC.
+ Once GCC version 2 has supplanted version 1, this can go. */
+
+#define YYFAIL goto yyerrlab
+
+#define YYRECOVERING() (!!yyerrstatus)
+
+#define YYBACKUP(Token, Value) \
+do \
+ if (yychar == YYEMPTY && yylen == 1) \
+ { \
+ yychar = (Token); \
+ yylval = (Value); \
+ yytoken = YYTRANSLATE (yychar); \
+ YYPOPSTACK; \
+ goto yybackup; \
+ } \
+ else \
+ { \
+ yyerror ("syntax error: cannot back up");\
+ YYERROR; \
+ } \
+while (0)
+
+#define YYTERROR 1
+#define YYERRCODE 256
+
+/* YYLLOC_DEFAULT -- Compute the default location (before the actions
+ are run). */
+
+#ifndef YYLLOC_DEFAULT
+# define YYLLOC_DEFAULT(Current, Rhs, N) \
+ Current.first_line = Rhs[1].first_line; \
+ Current.first_column = Rhs[1].first_column; \
+ Current.last_line = Rhs[N].last_line; \
+ Current.last_column = Rhs[N].last_column;
+#endif
+
+/* YYLEX -- calling `yylex' with the right arguments. */
+
+#ifdef YYLEX_PARAM
+# define YYLEX yylex (YYLEX_PARAM)
+#else
+# define YYLEX yylex ()
+#endif
+
+/* Enable debugging if requested. */
+#if YYDEBUG
+
+# ifndef YYFPRINTF
+# include <stdio.h> /* INFRINGES ON USER NAME SPACE */
+# define YYFPRINTF fprintf
+# endif
+
+# define YYDPRINTF(Args) \
+do { \
+ if (yydebug) \
+ YYFPRINTF Args; \
+} while (0)
+
+# define YYDSYMPRINT(Args) \
+do { \
+ if (yydebug) \
+ yysymprint Args; \
+} while (0)
+
+# define YYDSYMPRINTF(Title, Token, Value, Location) \
+do { \
+ if (yydebug) \
+ { \
+ YYFPRINTF (stderr, "%s ", Title); \
+ yysymprint (stderr, \
+ Token, Value); \
+ YYFPRINTF (stderr, "\n"); \
+ } \
+} while (0)
+
+/*------------------------------------------------------------------.
+| yy_stack_print -- Print the state stack from its BOTTOM up to its |
+| TOP (included). |
+`------------------------------------------------------------------*/
+
+#if defined (__STDC__) || defined (__cplusplus)
+static void
+yy_stack_print (short *bottom, short *top)
+#else
+static void
+yy_stack_print (bottom, top)
+ short *bottom;
+ short *top;
+#endif
+{
+ YYFPRINTF (stderr, "Stack now");
+ for (/* Nothing. */; bottom <= top; ++bottom)
+ YYFPRINTF (stderr, " %d", *bottom);
+ YYFPRINTF (stderr, "\n");
+}
+
+# define YY_STACK_PRINT(Bottom, Top) \
+do { \
+ if (yydebug) \
+ yy_stack_print ((Bottom), (Top)); \
+} while (0)
+
+
+/*------------------------------------------------.
+| Report that the YYRULE is going to be reduced. |
+`------------------------------------------------*/
+
+#if defined (__STDC__) || defined (__cplusplus)
+static void
+yy_reduce_print (int yyrule)
+#else
+static void
+yy_reduce_print (yyrule)
+ int yyrule;
+#endif
+{
+ int yyi;
+ unsigned int yylineno = yyrline[yyrule];
+ YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ",
+ yyrule - 1, yylineno);
+ /* Print the symbols being reduced, and their result. */
+ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++)
+ YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]);
+ YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]);
+}
+
+# define YY_REDUCE_PRINT(Rule) \
+do { \
+ if (yydebug) \
+ yy_reduce_print (Rule); \
+} while (0)
+
+/* Nonzero means print parse trace. It is left uninitialized so that
+ multiple parsers can coexist. */
+int yydebug;
+#else /* !YYDEBUG */
+# define YYDPRINTF(Args)
+# define YYDSYMPRINT(Args)
+# define YYDSYMPRINTF(Title, Token, Value, Location)
+# define YY_STACK_PRINT(Bottom, Top)
+# define YY_REDUCE_PRINT(Rule)
+#endif /* !YYDEBUG */
+
+
+/* YYINITDEPTH -- initial size of the parser's stacks. */
+#ifndef YYINITDEPTH
+# define YYINITDEPTH 200
+#endif
+
+/* YYMAXDEPTH -- maximum size the stacks can grow to (effective only
+ if the built-in stack extension method is used).
+
+ Do not make this value too large; the results are undefined if
+ SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH)
+ evaluated with infinite-precision integer arithmetic. */
+
+#if YYMAXDEPTH == 0
+# undef YYMAXDEPTH
+#endif
+
+#ifndef YYMAXDEPTH
+# define YYMAXDEPTH 10000
+#endif
+
+
+
+#if YYERROR_VERBOSE
+
+# ifndef yystrlen
+# if defined (__GLIBC__) && defined (_STRING_H)
+# define yystrlen strlen
+# else
+/* Return the length of YYSTR. */
+static YYSIZE_T
+# if defined (__STDC__) || defined (__cplusplus)
+yystrlen (const char *yystr)
+# else
+yystrlen (yystr)
+ const char *yystr;
+# endif
+{
+ register const char *yys = yystr;
+
+ while (*yys++ != '\0')
+ continue;
+
+ return yys - yystr - 1;
+}
+# endif
+# endif
+
+# ifndef yystpcpy
+# if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE)
+# define yystpcpy stpcpy
+# else
+/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in
+ YYDEST. */
+static char *
+# if defined (__STDC__) || defined (__cplusplus)
+yystpcpy (char *yydest, const char *yysrc)
+# else
+yystpcpy (yydest, yysrc)
+ char *yydest;
+ const char *yysrc;
+# endif
+{
+ register char *yyd = yydest;
+ register const char *yys = yysrc;
+
+ while ((*yyd++ = *yys++) != '\0')
+ continue;
+
+ return yyd - 1;
+}
+# endif
+# endif
+
+#endif /* !YYERROR_VERBOSE */
+
+
+
+#if YYDEBUG
+/*--------------------------------.
+| Print this symbol on YYOUTPUT. |
+`--------------------------------*/
+
+#if defined (__STDC__) || defined (__cplusplus)
+static void
+yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep)
+#else
+static void
+yysymprint (yyoutput, yytype, yyvaluep)
+ FILE *yyoutput;
+ int yytype;
+ YYSTYPE *yyvaluep;
+#endif
+{
+ /* Pacify ``unused variable'' warnings. */
+ (void) yyvaluep;
+
+ if (yytype < YYNTOKENS)
+ {
+ YYFPRINTF (yyoutput, "token %s (", yytname[yytype]);
+# ifdef YYPRINT
+ YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep);
+# endif
+ }
+ else
+ YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]);
+
+ switch (yytype)
+ {
+ default:
+ break;
+ }
+ YYFPRINTF (yyoutput, ")");
+}
+
+#endif /* ! YYDEBUG */
+/*-----------------------------------------------.
+| Release the memory associated to this symbol. |
+`-----------------------------------------------*/
+
+#if defined (__STDC__) || defined (__cplusplus)
+static void
+yydestruct (int yytype, YYSTYPE *yyvaluep)
+#else
+static void
+yydestruct (yytype, yyvaluep)
+ int yytype;
+ YYSTYPE *yyvaluep;
+#endif
+{
+ /* Pacify ``unused variable'' warnings. */
+ (void) yyvaluep;
+
+ switch (yytype)
+ {
+
+ default:
+ break;
+ }
+}
+
+
+/* Prevent warnings from -Wmissing-prototypes. */
+
+#ifdef YYPARSE_PARAM
+# if defined (__STDC__) || defined (__cplusplus)
+int yyparse (void *YYPARSE_PARAM);
+# else
+int yyparse ();
+# endif
+#else /* ! YYPARSE_PARAM */
+#if defined (__STDC__) || defined (__cplusplus)
+int yyparse (void);
+#else
+int yyparse ();
+#endif
+#endif /* ! YYPARSE_PARAM */
+
+
+
+/* The lookahead symbol. */
+int yychar;
+
+/* The semantic value of the lookahead symbol. */
+YYSTYPE yylval;
+
+/* Number of syntax errors so far. */
+int yynerrs;
+
+
+
+/*----------.
+| yyparse. |
+`----------*/
+
+#ifdef YYPARSE_PARAM
+# if defined (__STDC__) || defined (__cplusplus)
+int yyparse (void *YYPARSE_PARAM)
+# else
+int yyparse (YYPARSE_PARAM)
+ void *YYPARSE_PARAM;
+# endif
+#else /* ! YYPARSE_PARAM */
+#if defined (__STDC__) || defined (__cplusplus)
+int
+yyparse (void)
+#else
+int
+yyparse ()
+
+#endif
+#endif
+{
+
+ register int yystate;
+ register int yyn;
+ int yyresult;
+ /* Number of tokens to shift before error messages enabled. */
+ int yyerrstatus;
+ /* Lookahead token as an internal (translated) token number. */
+ int yytoken = 0;
+
+ /* Three stacks and their tools:
+ `yyss': related to states,
+ `yyvs': related to semantic values,
+ `yyls': related to locations.
+
+ Refer to the stacks thru separate pointers, to allow yyoverflow
+ to reallocate them elsewhere. */
+
+ /* The state stack. */
+ short yyssa[YYINITDEPTH];
+ short *yyss = yyssa;
+ register short *yyssp;
+
+ /* The semantic value stack. */
+ YYSTYPE yyvsa[YYINITDEPTH];
+ YYSTYPE *yyvs = yyvsa;
+ register YYSTYPE *yyvsp;
+
+
+
+#define YYPOPSTACK (yyvsp--, yyssp--)
+
+ YYSIZE_T yystacksize = YYINITDEPTH;
+
+ /* The variables used to return semantic value and location from the
+ action routines. */
+ YYSTYPE yyval;
+
+
+ /* When reducing, the number of symbols on the RHS of the reduced
+ rule. */
+ int yylen;
+
+ YYDPRINTF ((stderr, "Starting parse\n"));
+
+ yystate = 0;
+ yyerrstatus = 0;
+ yynerrs = 0;
+ yychar = YYEMPTY; /* Cause a token to be read. */
+
+ /* Initialize stack pointers.
+ Waste one element of value and location stack
+ so that they stay on the same level as the state stack.
+ The wasted elements are never initialized. */
+
+ yyssp = yyss;
+ yyvsp = yyvs;
+
+ goto yysetstate;
+
+/*------------------------------------------------------------.
+| yynewstate -- Push a new state, which is found in yystate. |
+`------------------------------------------------------------*/
+ yynewstate:
+ /* In all cases, when you get here, the value and location stacks
+ have just been pushed. so pushing a state here evens the stacks.
+ */
+ yyssp++;
+
+ yysetstate:
+ *yyssp = yystate;
+
+ if (yyss + yystacksize - 1 <= yyssp)
+ {
+ /* Get the current used size of the three stacks, in elements. */
+ YYSIZE_T yysize = yyssp - yyss + 1;
+
+#ifdef yyoverflow
+ {
+ /* Give user a chance to reallocate the stack. Use copies of
+ these so that the &'s don't force the real ones into
+ memory. */
+ YYSTYPE *yyvs1 = yyvs;
+ short *yyss1 = yyss;
+
+
+ /* Each stack pointer address is followed by the size of the
+ data in use in that stack, in bytes. This used to be a
+ conditional around just the two extra args, but that might
+ be undefined if yyoverflow is a macro. */
+ yyoverflow ("parser stack overflow",
+ &yyss1, yysize * sizeof (*yyssp),
+ &yyvs1, yysize * sizeof (*yyvsp),
+
+ &yystacksize);
+
+ yyss = yyss1;
+ yyvs = yyvs1;
+ }
+#else /* no yyoverflow */
+# ifndef YYSTACK_RELOCATE
+ goto yyoverflowlab;
+# else
+ /* Extend the stack our own way. */
+ if (YYMAXDEPTH <= yystacksize)
+ goto yyoverflowlab;
+ yystacksize *= 2;
+ if (YYMAXDEPTH < yystacksize)
+ yystacksize = YYMAXDEPTH;
+
+ {
+ short *yyss1 = yyss;
+ union yyalloc *yyptr =
+ (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize));
+ if (! yyptr)
+ goto yyoverflowlab;
+ YYSTACK_RELOCATE (yyss);
+ YYSTACK_RELOCATE (yyvs);
+
+# undef YYSTACK_RELOCATE
+ if (yyss1 != yyssa)
+ YYSTACK_FREE (yyss1);
+ }
+# endif
+#endif /* no yyoverflow */
+
+ yyssp = yyss + yysize - 1;
+ yyvsp = yyvs + yysize - 1;
+
+
+ YYDPRINTF ((stderr, "Stack size increased to %lu\n",
+ (unsigned long int) yystacksize));
+
+ if (yyss + yystacksize - 1 <= yyssp)
+ YYABORT;
+ }
+
+ YYDPRINTF ((stderr, "Entering state %d\n", yystate));
+
+ goto yybackup;
+
+/*-----------.
+| yybackup. |
+`-----------*/
+yybackup:
+
+/* Do appropriate processing given the current state. */
+/* Read a lookahead token if we need one and don't already have one. */
+/* yyresume: */
+
+ /* First try to decide what to do without reference to lookahead token. */
+
+ yyn = yypact[yystate];
+ if (yyn == YYPACT_NINF)
+ goto yydefault;
+
+ /* Not known => get a lookahead token if don't already have one. */
+
+ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */
+ if (yychar == YYEMPTY)
+ {
+ YYDPRINTF ((stderr, "Reading a token: "));
+ yychar = YYLEX;
+ }
+
+ if (yychar <= YYEOF)
+ {
+ yychar = yytoken = YYEOF;
+ YYDPRINTF ((stderr, "Now at end of input.\n"));
+ }
+ else
+ {
+ yytoken = YYTRANSLATE (yychar);
+ YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc);
+ }
+
+ /* If the proper action on seeing token YYTOKEN is to reduce or to
+ detect an error, take that action. */
+ yyn += yytoken;
+ if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken)
+ goto yydefault;
+ yyn = yytable[yyn];
+ if (yyn <= 0)
+ {
+ if (yyn == 0 || yyn == YYTABLE_NINF)
+ goto yyerrlab;
+ yyn = -yyn;
+ goto yyreduce;
+ }
+
+ if (yyn == YYFINAL)
+ YYACCEPT;
+
+ /* Shift the lookahead token. */
+ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken]));
+
+ /* Discard the token being shifted unless it is eof. */
+ if (yychar != YYEOF)
+ yychar = YYEMPTY;
+
+ *++yyvsp = yylval;
+
+
+ /* Count tokens shifted since error; after three, turn off error
+ status. */
+ if (yyerrstatus)
+ yyerrstatus--;
+
+ yystate = yyn;
+ goto yynewstate;
+
+
+/*-----------------------------------------------------------.
+| yydefault -- do the default action for the current state. |
+`-----------------------------------------------------------*/
+yydefault:
+ yyn = yydefact[yystate];
+ if (yyn == 0)
+ goto yyerrlab;
+ goto yyreduce;
+
+
+/*-----------------------------.
+| yyreduce -- Do a reduction. |
+`-----------------------------*/
+yyreduce:
+ /* yyn is the number of a rule to reduce with. */
+ yylen = yyr2[yyn];
+
+ /* If YYLEN is nonzero, implement the default value of the action:
+ `$$ = $1'.
+
+ Otherwise, the following line sets YYVAL to garbage.
+ This behavior is undocumented and Bison
+ users should not rely upon it. Assigning to YYVAL
+ unconditionally makes the parser a bit smaller, and it avoids a
+ GCC warning that YYVAL may be used uninitialized. */
+ yyval = yyvsp[1-yylen];
+
+
+ YY_REDUCE_PRINT (yyn);
+ switch (yyn)
+ {
+ case 3:
+#line 142 "jamgram.y"
+ { parse_save( yyvsp[0].parse ); }
+ break;
+
+ case 4:
+#line 153 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+ break;
+
+ case 5:
+#line 155 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+ break;
+
+ case 6:
+#line 159 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+ break;
+
+ case 7:
+#line 161 "jamgram.y"
+ { yyval.parse = prules( yyvsp[-1].parse, yyvsp[0].parse ); }
+ break;
+
+ case 8:
+#line 163 "jamgram.y"
+ { yyval.parse = plocal( yyvsp[-3].parse, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 9:
+#line 167 "jamgram.y"
+ { yyval.parse = pnull(); }
+ break;
+
+ case 10:
+#line 171 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_SET; }
+ break;
+
+ case 11:
+#line 173 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_APPEND; }
+ break;
+
+ case 12:
+#line 177 "jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+ break;
+
+ case 13:
+#line 179 "jamgram.y"
+ { yyval.parse = P0; }
+ break;
+
+ case 14:
+#line 183 "jamgram.y"
+ { yyval.number = 1; }
+ break;
+
+ case 15:
+#line 185 "jamgram.y"
+ { yyval.number = 0; }
+ break;
+
+ case 16:
+#line 189 "jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+ break;
+
+ case 17:
+#line 191 "jamgram.y"
+ { yyval.parse = pincl( yyvsp[-1].parse ); }
+ break;
+
+ case 18:
+#line 193 "jamgram.y"
+ { yyval.parse = prule( yyvsp[-2].string, yyvsp[-1].parse ); }
+ break;
+
+ case 19:
+#line 195 "jamgram.y"
+ { yyval.parse = pset( yyvsp[-3].parse, yyvsp[-1].parse, yyvsp[-2].number ); }
+ break;
+
+ case 20:
+#line 197 "jamgram.y"
+ { yyval.parse = pset1( yyvsp[-5].parse, yyvsp[-3].parse, yyvsp[-1].parse, yyvsp[-2].number ); }
+ break;
+
+ case 21:
+#line 199 "jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+ break;
+
+ case 22:
+#line 201 "jamgram.y"
+ { yyval.parse = pfor( yyvsp[-5].string, yyvsp[-3].parse, yyvsp[-1].parse, yyvsp[-6].number ); }
+ break;
+
+ case 23:
+#line 203 "jamgram.y"
+ { yyval.parse = pswitch( yyvsp[-3].parse, yyvsp[-1].parse ); }
+ break;
+
+ case 24:
+#line 205 "jamgram.y"
+ { yyval.parse = pif( yyvsp[-3].parse, yyvsp[-1].parse, pnull() ); }
+ break;
+
+ case 25:
+#line 207 "jamgram.y"
+ { yyval.parse = pmodule( yyvsp[-3].parse, yyvsp[-1].parse ); }
+ break;
+
+ case 26:
+#line 209 "jamgram.y"
+ { yyval.parse = pclass( yyvsp[-3].parse, yyvsp[-1].parse ); }
+ break;
+
+ case 27:
+#line 211 "jamgram.y"
+ { yyval.parse = pwhile( yyvsp[-3].parse, yyvsp[-1].parse ); }
+ break;
+
+ case 28:
+#line 213 "jamgram.y"
+ { yyval.parse = pif( yyvsp[-5].parse, yyvsp[-3].parse, yyvsp[0].parse ); }
+ break;
+
+ case 29:
+#line 215 "jamgram.y"
+ { yyval.parse = psetc( yyvsp[-2].string, yyvsp[0].parse, yyvsp[-1].parse, yyvsp[-4].number ); }
+ break;
+
+ case 30:
+#line 217 "jamgram.y"
+ { yyval.parse = pon( yyvsp[-1].parse, yyvsp[0].parse ); }
+ break;
+
+ case 31:
+#line 219 "jamgram.y"
+ { yymode( SCAN_STRING ); }
+ break;
+
+ case 32:
+#line 221 "jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+ break;
+
+ case 33:
+#line 223 "jamgram.y"
+ { yyval.parse = psete( yyvsp[-6].string,yyvsp[-5].parse,yyvsp[-2].string,yyvsp[-7].number ); }
+ break;
+
+ case 34:
+#line 231 "jamgram.y"
+ { yyval.number = ASSIGN_SET; }
+ break;
+
+ case 35:
+#line 233 "jamgram.y"
+ { yyval.number = ASSIGN_APPEND; }
+ break;
+
+ case 36:
+#line 235 "jamgram.y"
+ { yyval.number = ASSIGN_DEFAULT; }
+ break;
+
+ case 37:
+#line 237 "jamgram.y"
+ { yyval.number = ASSIGN_DEFAULT; }
+ break;
+
+ case 38:
+#line 244 "jamgram.y"
+ { yyval.parse = peval( EXPR_EXISTS, yyvsp[0].parse, pnull() ); }
+ break;
+
+ case 39:
+#line 246 "jamgram.y"
+ { yyval.parse = peval( EXPR_EQUALS, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 40:
+#line 248 "jamgram.y"
+ { yyval.parse = peval( EXPR_NOTEQ, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 41:
+#line 250 "jamgram.y"
+ { yyval.parse = peval( EXPR_LESS, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 42:
+#line 252 "jamgram.y"
+ { yyval.parse = peval( EXPR_LESSEQ, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 43:
+#line 254 "jamgram.y"
+ { yyval.parse = peval( EXPR_MORE, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 44:
+#line 256 "jamgram.y"
+ { yyval.parse = peval( EXPR_MOREEQ, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 45:
+#line 258 "jamgram.y"
+ { yyval.parse = peval( EXPR_AND, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 46:
+#line 260 "jamgram.y"
+ { yyval.parse = peval( EXPR_AND, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 47:
+#line 262 "jamgram.y"
+ { yyval.parse = peval( EXPR_OR, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 48:
+#line 264 "jamgram.y"
+ { yyval.parse = peval( EXPR_OR, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 49:
+#line 266 "jamgram.y"
+ { yyval.parse = peval( EXPR_IN, yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 50:
+#line 268 "jamgram.y"
+ { yyval.parse = peval( EXPR_NOT, yyvsp[0].parse, pnull() ); }
+ break;
+
+ case 51:
+#line 270 "jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+ break;
+
+ case 52:
+#line 281 "jamgram.y"
+ { yyval.parse = P0; }
+ break;
+
+ case 53:
+#line 283 "jamgram.y"
+ { yyval.parse = pnode( yyvsp[-1].parse, yyvsp[0].parse ); }
+ break;
+
+ case 54:
+#line 287 "jamgram.y"
+ { yyval.parse = psnode( yyvsp[-2].string, yyvsp[0].parse ); }
+ break;
+
+ case 55:
+#line 296 "jamgram.y"
+ { yyval.parse = pnode( P0, yyvsp[0].parse ); }
+ break;
+
+ case 56:
+#line 298 "jamgram.y"
+ { yyval.parse = pnode( yyvsp[0].parse, yyvsp[-2].parse ); }
+ break;
+
+ case 57:
+#line 308 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; yymode( SCAN_NORMAL ); }
+ break;
+
+ case 58:
+#line 312 "jamgram.y"
+ { yyval.parse = pnull(); yymode( SCAN_PUNCT ); }
+ break;
+
+ case 59:
+#line 314 "jamgram.y"
+ { yyval.parse = pappend( yyvsp[-1].parse, yyvsp[0].parse ); }
+ break;
+
+ case 60:
+#line 318 "jamgram.y"
+ { yyval.parse = plist( yyvsp[0].string ); }
+ break;
+
+ case 61:
+#line 319 "jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+ break;
+
+ case 62:
+#line 320 "jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+ break;
+
+ case 63:
+#line 329 "jamgram.y"
+ { yyval.parse = prule( yyvsp[-1].string, yyvsp[0].parse ); }
+ break;
+
+ case 64:
+#line 331 "jamgram.y"
+ { yyval.parse = pon( yyvsp[-2].parse, prule( yyvsp[-1].string, yyvsp[0].parse ) ); }
+ break;
+
+ case 65:
+#line 333 "jamgram.y"
+ { yyval.parse = pon( yyvsp[-2].parse, yyvsp[0].parse ); }
+ break;
+
+ case 66:
+#line 343 "jamgram.y"
+ { yyval.number = 0; }
+ break;
+
+ case 67:
+#line 345 "jamgram.y"
+ { yyval.number = yyvsp[-1].number | yyvsp[0].number; }
+ break;
+
+ case 68:
+#line 349 "jamgram.y"
+ { yyval.number = EXEC_UPDATED; }
+ break;
+
+ case 69:
+#line 351 "jamgram.y"
+ { yyval.number = EXEC_TOGETHER; }
+ break;
+
+ case 70:
+#line 353 "jamgram.y"
+ { yyval.number = EXEC_IGNORE; }
+ break;
+
+ case 71:
+#line 355 "jamgram.y"
+ { yyval.number = EXEC_QUIETLY; }
+ break;
+
+ case 72:
+#line 357 "jamgram.y"
+ { yyval.number = EXEC_PIECEMEAL; }
+ break;
+
+ case 73:
+#line 359 "jamgram.y"
+ { yyval.number = EXEC_EXISTING; }
+ break;
+
+ case 74:
+#line 368 "jamgram.y"
+ { yyval.parse = pnull(); }
+ break;
+
+ case 75:
+#line 370 "jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+ break;
+
+
+ }
+
+/* Line 991 of yacc.c. */
+#line 1621 "y.tab.c"
+
+ yyvsp -= yylen;
+ yyssp -= yylen;
+
+
+ YY_STACK_PRINT (yyss, yyssp);
+
+ *++yyvsp = yyval;
+
+
+ /* Now `shift' the result of the reduction. Determine what state
+ that goes to, based on the state we popped back to and the rule
+ number reduced by. */
+
+ yyn = yyr1[yyn];
+
+ yystate = yypgoto[yyn - YYNTOKENS] + *yyssp;
+ if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp)
+ yystate = yytable[yystate];
+ else
+ yystate = yydefgoto[yyn - YYNTOKENS];
+
+ goto yynewstate;
+
+
+/*------------------------------------.
+| yyerrlab -- here on detecting error |
+`------------------------------------*/
+yyerrlab:
+ /* If not already recovering from an error, report this error. */
+ if (!yyerrstatus)
+ {
+ ++yynerrs;
+#if YYERROR_VERBOSE
+ yyn = yypact[yystate];
+
+ if (YYPACT_NINF < yyn && yyn < YYLAST)
+ {
+ YYSIZE_T yysize = 0;
+ int yytype = YYTRANSLATE (yychar);
+ char *yymsg;
+ int yyx, yycount;
+
+ yycount = 0;
+ /* Start YYX at -YYN if negative to avoid negative indexes in
+ YYCHECK. */
+ for (yyx = yyn < 0 ? -yyn : 0;
+ yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++)
+ if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR)
+ yysize += yystrlen (yytname[yyx]) + 15, yycount++;
+ yysize += yystrlen ("syntax error, unexpected ") + 1;
+ yysize += yystrlen (yytname[yytype]);
+ yymsg = (char *) YYSTACK_ALLOC (yysize);
+ if (yymsg != 0)
+ {
+ char *yyp = yystpcpy (yymsg, "syntax error, unexpected ");
+ yyp = yystpcpy (yyp, yytname[yytype]);
+
+ if (yycount < 5)
+ {
+ yycount = 0;
+ for (yyx = yyn < 0 ? -yyn : 0;
+ yyx < (int) (sizeof (yytname) / sizeof (char *));
+ yyx++)
+ if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR)
+ {
+ const char *yyq = ! yycount ? ", expecting " : " or ";
+ yyp = yystpcpy (yyp, yyq);
+ yyp = yystpcpy (yyp, yytname[yyx]);
+ yycount++;
+ }
+ }
+ yyerror (yymsg);
+ YYSTACK_FREE (yymsg);
+ }
+ else
+ yyerror ("syntax error; also virtual memory exhausted");
+ }
+ else
+#endif /* YYERROR_VERBOSE */
+ yyerror ("syntax error");
+ }
+
+
+
+ if (yyerrstatus == 3)
+ {
+ /* If just tried and failed to reuse lookahead token after an
+ error, discard it. */
+
+ /* Return failure if at end of input. */
+ if (yychar == YYEOF)
+ {
+ /* Pop the error token. */
+ YYPOPSTACK;
+ /* Pop the rest of the stack. */
+ while (yyss < yyssp)
+ {
+ YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp);
+ yydestruct (yystos[*yyssp], yyvsp);
+ YYPOPSTACK;
+ }
+ YYABORT;
+ }
+
+ YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc);
+ yydestruct (yytoken, &yylval);
+ yychar = YYEMPTY;
+
+ }
+
+ /* Else will try to reuse lookahead token after shifting the error
+ token. */
+ goto yyerrlab2;
+
+
+/*----------------------------------------------------.
+| yyerrlab1 -- error raised explicitly by an action. |
+`----------------------------------------------------*/
+yyerrlab1:
+
+ /* Suppress GCC warning that yyerrlab1 is unused when no action
+ invokes YYERROR. */
+#if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__)
+ __attribute__ ((__unused__))
+#endif
+
+
+ goto yyerrlab2;
+
+
+/*---------------------------------------------------------------.
+| yyerrlab2 -- pop states until the error token can be shifted. |
+`---------------------------------------------------------------*/
+yyerrlab2:
+ yyerrstatus = 3; /* Each real token shifted decrements this. */
+
+ for (;;)
+ {
+ yyn = yypact[yystate];
+ if (yyn != YYPACT_NINF)
+ {
+ yyn += YYTERROR;
+ if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR)
+ {
+ yyn = yytable[yyn];
+ if (0 < yyn)
+ break;
+ }
+ }
+
+ /* Pop the current state because it cannot handle the error token. */
+ if (yyssp == yyss)
+ YYABORT;
+
+ YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp);
+ yydestruct (yystos[yystate], yyvsp);
+ yyvsp--;
+ yystate = *--yyssp;
+
+ YY_STACK_PRINT (yyss, yyssp);
+ }
+
+ if (yyn == YYFINAL)
+ YYACCEPT;
+
+ YYDPRINTF ((stderr, "Shifting error token, "));
+
+ *++yyvsp = yylval;
+
+
+ yystate = yyn;
+ goto yynewstate;
+
+
+/*-------------------------------------.
+| yyacceptlab -- YYACCEPT comes here. |
+`-------------------------------------*/
+yyacceptlab:
+ yyresult = 0;
+ goto yyreturn;
+
+/*-----------------------------------.
+| yyabortlab -- YYABORT comes here. |
+`-----------------------------------*/
+yyabortlab:
+ yyresult = 1;
+ goto yyreturn;
+
+#ifndef yyoverflow
+/*----------------------------------------------.
+| yyoverflowlab -- parser overflow comes here. |
+`----------------------------------------------*/
+yyoverflowlab:
+ yyerror ("parser stack overflow");
+ yyresult = 2;
+ /* Fall through. */
+#endif
+
+yyreturn:
+#ifndef yyoverflow
+ if (yyss != yyssa)
+ YYSTACK_FREE (yyss);
+#endif
+ return yyresult;
+}
+
+
+
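One detail worth noting in the yyparse body above: the parser keeps its state and value stacks in parallel, starting with YYINITDEPTH (200) automatic entries and, when they fill, doubling the allocation up to YYMAXDEPTH (10000), copying the old contents across with YYSTACK_RELOCATE before continuing, or aborting with "parser stack overflow" once the cap is hit. Below is a self-contained sketch of that grow-and-copy pattern; grow_stack, INIT_DEPTH and MAX_DEPTH are names invented here for illustration, not identifiers from the generated parser.

#include <stdlib.h>
#include <string.h>

#define INIT_DEPTH 200
#define MAX_DEPTH  10000

/* Grow a stack that started life as an automatic array: double the size
 * (capped at MAX_DEPTH), copy the used entries across, and free the old
 * buffer only if it was itself heap-allocated. Returns 0 on failure. */
static int grow_stack(short **stack, short *initial, size_t used, size_t *size)
{
    size_t newsize;
    short *bigger;

    if (*size >= MAX_DEPTH)
        return 0;                          /* the parser would YYABORT here */
    newsize = *size * 2;
    if (newsize > MAX_DEPTH)
        newsize = MAX_DEPTH;

    bigger = malloc(newsize * sizeof *bigger);
    if (bigger == 0)
        return 0;
    memcpy(bigger, *stack, used * sizeof *bigger);   /* the YYCOPY step */
    if (*stack != initial)                 /* only heap copies get freed */
        free(*stack);
    *stack = bigger;
    *size = newsize;
    return 1;
}

int main(void)
{
    short initial[INIT_DEPTH] = {0};
    short *stack = initial;
    size_t size = INIT_DEPTH;

    /* Pretend the first INIT_DEPTH entries are in use and need more room. */
    int grew = grow_stack(&stack, initial, INIT_DEPTH, &size);
    if (stack != initial)
        free(stack);
    return grew ? 0 : 1;
}

The *stack != initial guard mirrors the if (yyss != yyssa) YYSTACK_FREE (yyss) check in the parser, which never frees the original automatic arrays.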
diff --git a/jam-files/engine/jamgram.h b/jam-files/engine/jamgram.h
new file mode 100644
index 000000000..3cb765641
--- /dev/null
+++ b/jam-files/engine/jamgram.h
@@ -0,0 +1,140 @@
+/* A Bison parser, made by GNU Bison 1.875. */
+
+/* Skeleton parser for Yacc-like parsing with Bison,
+ Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc.
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2, or (at your option)
+ any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place - Suite 330,
+ Boston, MA 02111-1307, USA. */
+
+/* As a special exception, when this file is copied by Bison into a
+ Bison output file, you may use that output file without restriction.
+ This special exception was added by the Free Software Foundation
+ in version 1.24 of Bison. */
+
+/* Tokens. */
+#ifndef YYTOKENTYPE
+# define YYTOKENTYPE
+ /* Put the tokens into the symbol table, so that GDB and other debuggers
+ know about them. */
+ enum yytokentype {
+ _BANG_t = 258,
+ _BANG_EQUALS_t = 259,
+ _AMPER_t = 260,
+ _AMPERAMPER_t = 261,
+ _LPAREN_t = 262,
+ _RPAREN_t = 263,
+ _PLUS_EQUALS_t = 264,
+ _COLON_t = 265,
+ _SEMIC_t = 266,
+ _LANGLE_t = 267,
+ _LANGLE_EQUALS_t = 268,
+ _EQUALS_t = 269,
+ _RANGLE_t = 270,
+ _RANGLE_EQUALS_t = 271,
+ _QUESTION_EQUALS_t = 272,
+ _LBRACKET_t = 273,
+ _RBRACKET_t = 274,
+ ACTIONS_t = 275,
+ BIND_t = 276,
+ CASE_t = 277,
+ CLASS_t = 278,
+ DEFAULT_t = 279,
+ ELSE_t = 280,
+ EXISTING_t = 281,
+ FOR_t = 282,
+ IF_t = 283,
+ IGNORE_t = 284,
+ IN_t = 285,
+ INCLUDE_t = 286,
+ LOCAL_t = 287,
+ MODULE_t = 288,
+ ON_t = 289,
+ PIECEMEAL_t = 290,
+ QUIETLY_t = 291,
+ RETURN_t = 292,
+ RULE_t = 293,
+ SWITCH_t = 294,
+ TOGETHER_t = 295,
+ UPDATED_t = 296,
+ WHILE_t = 297,
+ _LBRACE_t = 298,
+ _BAR_t = 299,
+ _BARBAR_t = 300,
+ _RBRACE_t = 301,
+ ARG = 302,
+ STRING = 303
+ };
+#endif
+#define _BANG_t 258
+#define _BANG_EQUALS_t 259
+#define _AMPER_t 260
+#define _AMPERAMPER_t 261
+#define _LPAREN_t 262
+#define _RPAREN_t 263
+#define _PLUS_EQUALS_t 264
+#define _COLON_t 265
+#define _SEMIC_t 266
+#define _LANGLE_t 267
+#define _LANGLE_EQUALS_t 268
+#define _EQUALS_t 269
+#define _RANGLE_t 270
+#define _RANGLE_EQUALS_t 271
+#define _QUESTION_EQUALS_t 272
+#define _LBRACKET_t 273
+#define _RBRACKET_t 274
+#define ACTIONS_t 275
+#define BIND_t 276
+#define CASE_t 277
+#define CLASS_t 278
+#define DEFAULT_t 279
+#define ELSE_t 280
+#define EXISTING_t 281
+#define FOR_t 282
+#define IF_t 283
+#define IGNORE_t 284
+#define IN_t 285
+#define INCLUDE_t 286
+#define LOCAL_t 287
+#define MODULE_t 288
+#define ON_t 289
+#define PIECEMEAL_t 290
+#define QUIETLY_t 291
+#define RETURN_t 292
+#define RULE_t 293
+#define SWITCH_t 294
+#define TOGETHER_t 295
+#define UPDATED_t 296
+#define WHILE_t 297
+#define _LBRACE_t 298
+#define _BAR_t 299
+#define _BARBAR_t 300
+#define _RBRACE_t 301
+#define ARG 302
+#define STRING 303
+
+
+
+
+#if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED)
+typedef int YYSTYPE;
+# define yystype YYSTYPE /* obsolescent; will be withdrawn */
+# define YYSTYPE_IS_DECLARED 1
+# define YYSTYPE_IS_TRIVIAL 1
+#endif
+
+extern YYSTYPE yylval;
+
+
+
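This header gives the scanner everything it needs to talk to yyparse: the token codes (as both an enum and the matching #defines) and the shared yylval slot for semantic values. A minimal, hypothetical lexer stub illustrating that contract is sketched below; it is not jam's real scanner, it relies on the fallback typedef int YYSTYPE from this header, and the canned input array exists only for illustration.

#include <string.h>

#include "jamgram.h"          /* token codes and the extern YYSTYPE yylval */

/* Hypothetical stand-in for a scanner: returns one token code per call,
 * 0 at end of input, and stores a semantic value in the shared yylval. */
int yylex(void)
{
    static const char *input[] = { "Echo", "hello", ";", 0 };
    static int pos = 0;

    const char *tok = input[pos];
    if (tok == 0)
        return 0;                     /* 0 tells yyparse the input is done */
    pos++;
    if (strcmp(tok, ";") == 0)
        return _SEMIC_t;
    yylval = 0;                       /* fallback YYSTYPE is plain int here */
    return ARG;                       /* bare words come back as ARG tokens */
}

In the real engine the grammar's actions read fields such as $1.string and $1.parse, so the build defines a richer YYSTYPE before this header's fallback typedef is ever reached.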
diff --git a/jam-files/engine/jamgram.y b/jam-files/engine/jamgram.y
new file mode 100644
index 000000000..c26b1e1b6
--- /dev/null
+++ b/jam-files/engine/jamgram.y
@@ -0,0 +1,371 @@
+%token _BANG_t
+%token _BANG_EQUALS_t
+%token _AMPER_t
+%token _AMPERAMPER_t
+%token _LPAREN_t
+%token _RPAREN_t
+%token _PLUS_EQUALS_t
+%token _COLON_t
+%token _SEMIC_t
+%token _LANGLE_t
+%token _LANGLE_EQUALS_t
+%token _EQUALS_t
+%token _RANGLE_t
+%token _RANGLE_EQUALS_t
+%token _QUESTION_EQUALS_t
+%token _LBRACKET_t
+%token _RBRACKET_t
+%token ACTIONS_t
+%token BIND_t
+%token CASE_t
+%token CLASS_t
+%token DEFAULT_t
+%token ELSE_t
+%token EXISTING_t
+%token FOR_t
+%token IF_t
+%token IGNORE_t
+%token IN_t
+%token INCLUDE_t
+%token LOCAL_t
+%token MODULE_t
+%token ON_t
+%token PIECEMEAL_t
+%token QUIETLY_t
+%token RETURN_t
+%token RULE_t
+%token SWITCH_t
+%token TOGETHER_t
+%token UPDATED_t
+%token WHILE_t
+%token _LBRACE_t
+%token _BAR_t
+%token _BARBAR_t
+%token _RBRACE_t
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jamgram.yy - jam grammar
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ * 08/31/94 (seiwald) - Allow ?= as alias for "default =".
+ * 09/15/94 (seiwald) - if conditionals take only single arguments, so
+ * that 'if foo == bar' gives syntax error (use =).
+ * 02/11/95 (seiwald) - when scanning arguments to rules, only treat
+ * punctuation keywords as keywords. All arg lists
+ * are terminated with punctuation keywords.
+ *
+ * 09/11/00 (seiwald) - Support for function calls:
+ *
+ * Rules now return lists (LIST *), rather than void.
+ *
+ * New "[ rule ]" syntax evals rule into a LIST.
+ *
+ * Lists are now generated by compile_list() and
+ * compile_append(), and any other rule that indirectly
+ * makes a list, rather than being built directly here,
+ * so that lists values can contain rule evaluations.
+ *
+ * New 'return' rule sets the return value, though
+ * other statements also may have return values.
+ *
+ * 'run' production split from 'block' production so
+ * that empty blocks can be handled separately.
+ */
+
+%token ARG STRING
+
+%left _BARBAR_t _BAR_t
+%left _AMPERAMPER_t _AMPER_t
+%left _EQUALS_t _BANG_EQUALS_t IN_t
+%left _LANGLE_t _LANGLE_EQUALS_t _RANGLE_t _RANGLE_EQUALS_t
+%left _BANG_t
+
+%{
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "compile.h"
+#include "newstr.h"
+#include "rules.h"
+
+# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
+
+# define F0 (LIST *(*)(PARSE *, FRAME *))0
+# define P0 (PARSE *)0
+# define S0 (char *)0
+
+# define pappend( l,r ) parse_make( compile_append,l,r,P0,S0,S0,0 )
+# define peval( c,l,r ) parse_make( compile_eval,l,r,P0,S0,S0,c )
+# define pfor( s,l,r,x ) parse_make( compile_foreach,l,r,P0,s,S0,x )
+# define pif( l,r,t ) parse_make( compile_if,l,r,t,S0,S0,0 )
+# define pincl( l ) parse_make( compile_include,l,P0,P0,S0,S0,0 )
+# define plist( s ) parse_make( compile_list,P0,P0,P0,s,S0,0 )
+# define plocal( l,r,t ) parse_make( compile_local,l,r,t,S0,S0,0 )
+# define pmodule( l,r ) parse_make( compile_module,l,r,P0,S0,S0,0 )
+# define pclass( l,r ) parse_make( compile_class,l,r,P0,S0,S0,0 )
+# define pnull() parse_make( compile_null,P0,P0,P0,S0,S0,0 )
+# define pon( l,r ) parse_make( compile_on,l,r,P0,S0,S0,0 )
+# define prule( s,p ) parse_make( compile_rule,p,P0,P0,s,S0,0 )
+# define prules( l,r ) parse_make( compile_rules,l,r,P0,S0,S0,0 )
+# define pset( l,r,a ) parse_make( compile_set,l,r,P0,S0,S0,a )
+# define pset1( l,r,t,a ) parse_make( compile_settings,l,r,t,S0,S0,a )
+# define psetc( s,p,a,l ) parse_make( compile_setcomp,p,a,P0,s,S0,l )
+# define psete( s,l,s1,f ) parse_make( compile_setexec,l,P0,P0,s,s1,f )
+# define pswitch( l,r ) parse_make( compile_switch,l,r,P0,S0,S0,0 )
+# define pwhile( l,r ) parse_make( compile_while,l,r,P0,S0,S0,0 )
+
+# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
+# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
+
+%}
+
+%%
+
+run : /* empty */
+ /* do nothing */
+ | rules
+ { parse_save( $1.parse ); }
+ ;
+
+/*
+ * block - zero or more rules
+ * rules - one or more rules
+ * rule - any one of jam's rules
+ * right-recursive so rules execute in order.
+ */
+
+block : null
+ { $$.parse = $1.parse; }
+ | rules
+ { $$.parse = $1.parse; }
+ ;
+
+rules : rule
+ { $$.parse = $1.parse; }
+ | rule rules
+ { $$.parse = prules( $1.parse, $2.parse ); }
+ | LOCAL_t list assign_list_opt _SEMIC_t block
+ { $$.parse = plocal( $2.parse, $3.parse, $5.parse ); }
+ ;
+
+null : /* empty */
+ { $$.parse = pnull(); }
+ ;
+
+assign_list_opt : _EQUALS_t list
+ { $$.parse = $2.parse; $$.number = ASSIGN_SET; }
+ | null
+ { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; }
+ ;
+
+arglist_opt : _LPAREN_t lol _RPAREN_t
+ { $$.parse = $2.parse; }
+ |
+ { $$.parse = P0; }
+ ;
+
+local_opt : LOCAL_t
+ { $$.number = 1; }
+ | /* empty */
+ { $$.number = 0; }
+ ;
+
+rule : _LBRACE_t block _RBRACE_t
+ { $$.parse = $2.parse; }
+ | INCLUDE_t list _SEMIC_t
+ { $$.parse = pincl( $2.parse ); }
+ | ARG lol _SEMIC_t
+ { $$.parse = prule( $1.string, $2.parse ); }
+ | arg assign list _SEMIC_t
+ { $$.parse = pset( $1.parse, $3.parse, $2.number ); }
+ | arg ON_t list assign list _SEMIC_t
+ { $$.parse = pset1( $1.parse, $3.parse, $5.parse, $4.number ); }
+ | RETURN_t list _SEMIC_t
+ { $$.parse = $2.parse; }
+ | FOR_t local_opt ARG IN_t list _LBRACE_t block _RBRACE_t
+ { $$.parse = pfor( $3.string, $5.parse, $7.parse, $2.number ); }
+ | SWITCH_t list _LBRACE_t cases _RBRACE_t
+ { $$.parse = pswitch( $2.parse, $4.parse ); }
+ | IF_t expr _LBRACE_t block _RBRACE_t
+ { $$.parse = pif( $2.parse, $4.parse, pnull() ); }
+ | MODULE_t list _LBRACE_t block _RBRACE_t
+ { $$.parse = pmodule( $2.parse, $4.parse ); }
+ | CLASS_t lol _LBRACE_t block _RBRACE_t
+ { $$.parse = pclass( $2.parse, $4.parse ); }
+ | WHILE_t expr _LBRACE_t block _RBRACE_t
+ { $$.parse = pwhile( $2.parse, $4.parse ); }
+ | IF_t expr _LBRACE_t block _RBRACE_t ELSE_t rule
+ { $$.parse = pif( $2.parse, $4.parse, $7.parse ); }
+ | local_opt RULE_t ARG arglist_opt rule
+ { $$.parse = psetc( $3.string, $5.parse, $4.parse, $1.number ); }
+ | ON_t arg rule
+ { $$.parse = pon( $2.parse, $3.parse ); }
+ | ACTIONS_t eflags ARG bindlist _LBRACE_t
+ { yymode( SCAN_STRING ); }
+ STRING
+ { yymode( SCAN_NORMAL ); }
+ _RBRACE_t
+ { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); }
+ ;
+
+/*
+ * assign - = or +=
+ */
+
+assign : _EQUALS_t
+ { $$.number = ASSIGN_SET; }
+ | _PLUS_EQUALS_t
+ { $$.number = ASSIGN_APPEND; }
+ | _QUESTION_EQUALS_t
+ { $$.number = ASSIGN_DEFAULT; }
+ | DEFAULT_t _EQUALS_t
+ { $$.number = ASSIGN_DEFAULT; }
+ ;
+
+/*
+ * expr - an expression for if
+ */
+expr : arg
+ { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); }
+ | expr _EQUALS_t expr
+ { $$.parse = peval( EXPR_EQUALS, $1.parse, $3.parse ); }
+ | expr _BANG_EQUALS_t expr
+ { $$.parse = peval( EXPR_NOTEQ, $1.parse, $3.parse ); }
+ | expr _LANGLE_t expr
+ { $$.parse = peval( EXPR_LESS, $1.parse, $3.parse ); }
+ | expr _LANGLE_EQUALS_t expr
+ { $$.parse = peval( EXPR_LESSEQ, $1.parse, $3.parse ); }
+ | expr _RANGLE_t expr
+ { $$.parse = peval( EXPR_MORE, $1.parse, $3.parse ); }
+ | expr _RANGLE_EQUALS_t expr
+ { $$.parse = peval( EXPR_MOREEQ, $1.parse, $3.parse ); }
+ | expr _AMPER_t expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
+ | expr _AMPERAMPER_t expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
+ | expr _BAR_t expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
+ | expr _BARBAR_t expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
+ | arg IN_t list
+ { $$.parse = peval( EXPR_IN, $1.parse, $3.parse ); }
+ | _BANG_t expr
+ { $$.parse = peval( EXPR_NOT, $2.parse, pnull() ); }
+ | _LPAREN_t expr _RPAREN_t
+ { $$.parse = $2.parse; }
+ ;
+
+
+/*
+ * cases - action elements inside a 'switch'
+ * case - a single action element inside a 'switch'
+ * right-recursive rule so cases can be examined in order.
+ */
+
+cases : /* empty */
+ { $$.parse = P0; }
+ | case cases
+ { $$.parse = pnode( $1.parse, $2.parse ); }
+ ;
+
+case : CASE_t ARG _COLON_t block
+ { $$.parse = psnode( $2.string, $4.parse ); }
+ ;
+
+/*
+ * lol - list of lists
+ * right-recursive rule so that lists can be added in order.
+ */
+
+lol : list
+ { $$.parse = pnode( P0, $1.parse ); }
+ | list _COLON_t lol
+ { $$.parse = pnode( $3.parse, $1.parse ); }
+ ;
+
+/*
+ * list - zero or more args in a LIST
+ * listp - list (in punctuation only mode)
+ * arg - one ARG or function call
+ */
+
+list : listp
+ { $$.parse = $1.parse; yymode( SCAN_NORMAL ); }
+ ;
+
+listp : /* empty */
+ { $$.parse = pnull(); yymode( SCAN_PUNCT ); }
+ | listp arg
+ { $$.parse = pappend( $1.parse, $2.parse ); }
+ ;
+
+arg : ARG
+ { $$.parse = plist( $1.string ); }
+ | _LBRACKET_t { yymode( SCAN_NORMAL ); } func _RBRACKET_t
+ { $$.parse = $3.parse; }
+ ;
+
+/*
+ * func - a function call (inside [])
+ * This needs to be split cleanly out of 'rule'
+ */
+
+func : arg lol
+ { $$.parse = prule( $1.string, $2.parse ); }
+ | ON_t arg arg lol
+ { $$.parse = pon( $2.parse, prule( $3.string, $4.parse ) ); }
+ | ON_t arg RETURN_t list
+ { $$.parse = pon( $2.parse, $4.parse ); }
+ ;
+
+
+/*
+ * eflags - zero or more modifiers to 'executes'
+ * eflag - a single modifier to 'executes'
+ */
+
+eflags : /* empty */
+ { $$.number = 0; }
+ | eflags eflag
+ { $$.number = $1.number | $2.number; }
+ ;
+
+eflag : UPDATED_t
+ { $$.number = EXEC_UPDATED; }
+ | TOGETHER_t
+ { $$.number = EXEC_TOGETHER; }
+ | IGNORE_t
+ { $$.number = EXEC_IGNORE; }
+ | QUIETLY_t
+ { $$.number = EXEC_QUIETLY; }
+ | PIECEMEAL_t
+ { $$.number = EXEC_PIECEMEAL; }
+ | EXISTING_t
+ { $$.number = EXEC_EXISTING; }
+ ;
+
+
+/*
+ * bindlist - list of variables to bind for an action
+ */
+
+bindlist : /* empty */
+ { $$.parse = pnull(); }
+ | BIND_t list
+ { $$.parse = $2.parse; }
+ ;
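To make the action code above concrete, consider how a trivial Jam statement is reduced. For "ECHO yes ;" the productions fire bottom-up as arg, listp, list, lol and finally the "ARG lol _SEMIC_t" alternative of rule, so the generated parser ends up building the following parse tree (a sketch written with the p* macros from the prologue; in the real parser the string arguments are interned ARG tokens supplied by the scanner, not literals):

    /* parse tree built for:  ECHO yes ;   -- illustrative sketch only */
    PARSE * p =
        prule( "ECHO",                    /* rule : ARG lol _SEMIC_t */
            pnode( P0,                    /* lol  : list             */
                pappend(                  /* listp: listp arg        */
                    pnull(),              /* listp: (empty)          */
                    plist( "yes" ) ) ) ); /* arg  : ARG              */

When the tree is later evaluated, compile_rule() receives the one-element argument list built up by compile_append()/compile_list(), which is the behaviour described in the 09/11/00 note above.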
diff --git a/jam-files/engine/jamgram.yy b/jam-files/engine/jamgram.yy
new file mode 100644
index 000000000..152434871
--- /dev/null
+++ b/jam-files/engine/jamgram.yy
@@ -0,0 +1,329 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jamgram.yy - jam grammar
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ * 08/31/94 (seiwald) - Allow ?= as alias for "default =".
+ * 09/15/94 (seiwald) - if conditionals take only single arguments, so
+ * that 'if foo == bar' gives syntax error (use =).
+ * 02/11/95 (seiwald) - when scanning arguments to rules, only treat
+ * punctuation keywords as keywords. All arg lists
+ * are terminated with punctuation keywords.
+ *
+ * 09/11/00 (seiwald) - Support for function calls:
+ *
+ * Rules now return lists (LIST *), rather than void.
+ *
+ * New "[ rule ]" syntax evals rule into a LIST.
+ *
+ * Lists are now generated by compile_list() and
+ * compile_append(), and any other rule that indirectly
+ * makes a list, rather than being built directly here,
+ * so that list values can contain rule evaluations.
+ *
+ * New 'return' rule sets the return value, though
+ * other statements also may have return values.
+ *
+ * 'run' production split from 'block' production so
+ * that empty blocks can be handled separately.
+ */
+
+%token ARG STRING
+
+%left `||` `|`
+%left `&&` `&`
+%left `=` `!=` `in`
+%left `<` `<=` `>` `>=`
+%left `!`
+
+%{
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "compile.h"
+#include "newstr.h"
+#include "rules.h"
+
+# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
+
+# define F0 (LIST *(*)(PARSE *, FRAME *))0
+# define P0 (PARSE *)0
+# define S0 (char *)0
+
+# define pappend( l,r ) parse_make( compile_append,l,r,P0,S0,S0,0 )
+# define peval( c,l,r ) parse_make( compile_eval,l,r,P0,S0,S0,c )
+# define pfor( s,l,r,x ) parse_make( compile_foreach,l,r,P0,s,S0,x )
+# define pif( l,r,t ) parse_make( compile_if,l,r,t,S0,S0,0 )
+# define pincl( l ) parse_make( compile_include,l,P0,P0,S0,S0,0 )
+# define plist( s ) parse_make( compile_list,P0,P0,P0,s,S0,0 )
+# define plocal( l,r,t ) parse_make( compile_local,l,r,t,S0,S0,0 )
+# define pmodule( l,r ) parse_make( compile_module,l,r,P0,S0,S0,0 )
+# define pclass( l,r ) parse_make( compile_class,l,r,P0,S0,S0,0 )
+# define pnull() parse_make( compile_null,P0,P0,P0,S0,S0,0 )
+# define pon( l,r ) parse_make( compile_on,l,r,P0,S0,S0,0 )
+# define prule( s,p ) parse_make( compile_rule,p,P0,P0,s,S0,0 )
+# define prules( l,r ) parse_make( compile_rules,l,r,P0,S0,S0,0 )
+# define pset( l,r,a ) parse_make( compile_set,l,r,P0,S0,S0,a )
+# define pset1( l,r,t,a ) parse_make( compile_settings,l,r,t,S0,S0,a )
+# define psetc( s,p,a,l ) parse_make( compile_setcomp,p,a,P0,s,S0,l )
+# define psete( s,l,s1,f ) parse_make( compile_setexec,l,P0,P0,s,s1,f )
+# define pswitch( l,r ) parse_make( compile_switch,l,r,P0,S0,S0,0 )
+# define pwhile( l,r ) parse_make( compile_while,l,r,P0,S0,S0,0 )
+
+# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
+# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
+
+%}
+
+%%
+
+run : /* empty */
+ /* do nothing */
+ | rules
+ { parse_save( $1.parse ); }
+ ;
+
+/*
+ * block - zero or more rules
+ * rules - one or more rules
+ * rule - any one of jam's rules
+ * right-recursive so rules execute in order.
+ */
+
+block : null
+ { $$.parse = $1.parse; }
+ | rules
+ { $$.parse = $1.parse; }
+ ;
+
+rules : rule
+ { $$.parse = $1.parse; }
+ | rule rules
+ { $$.parse = prules( $1.parse, $2.parse ); }
+ | `local` list assign_list_opt `;` block
+ { $$.parse = plocal( $2.parse, $3.parse, $5.parse ); }
+ ;
+
+null : /* empty */
+ { $$.parse = pnull(); }
+ ;
+
+assign_list_opt : `=` list
+ { $$.parse = $2.parse; $$.number = ASSIGN_SET; }
+ | null
+ { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; }
+ ;
+
+arglist_opt : `(` lol `)`
+ { $$.parse = $2.parse; }
+ |
+ { $$.parse = P0; }
+ ;
+
+local_opt : `local`
+ { $$.number = 1; }
+ | /* empty */
+ { $$.number = 0; }
+ ;
+
+rule : `{` block `}`
+ { $$.parse = $2.parse; }
+ | `include` list `;`
+ { $$.parse = pincl( $2.parse ); }
+ | ARG lol `;`
+ { $$.parse = prule( $1.string, $2.parse ); }
+ | arg assign list `;`
+ { $$.parse = pset( $1.parse, $3.parse, $2.number ); }
+ | arg `on` list assign list `;`
+ { $$.parse = pset1( $1.parse, $3.parse, $5.parse, $4.number ); }
+ | `return` list `;`
+ { $$.parse = $2.parse; }
+ | `for` local_opt ARG `in` list `{` block `}`
+ { $$.parse = pfor( $3.string, $5.parse, $7.parse, $2.number ); }
+ | `switch` list `{` cases `}`
+ { $$.parse = pswitch( $2.parse, $4.parse ); }
+ | `if` expr `{` block `}`
+ { $$.parse = pif( $2.parse, $4.parse, pnull() ); }
+ | `module` list `{` block `}`
+ { $$.parse = pmodule( $2.parse, $4.parse ); }
+ | `class` lol `{` block `}`
+ { $$.parse = pclass( $2.parse, $4.parse ); }
+ | `while` expr `{` block `}`
+ { $$.parse = pwhile( $2.parse, $4.parse ); }
+ | `if` expr `{` block `}` `else` rule
+ { $$.parse = pif( $2.parse, $4.parse, $7.parse ); }
+ | local_opt `rule` ARG arglist_opt rule
+ { $$.parse = psetc( $3.string, $5.parse, $4.parse, $1.number ); }
+ | `on` arg rule
+ { $$.parse = pon( $2.parse, $3.parse ); }
+ | `actions` eflags ARG bindlist `{`
+ { yymode( SCAN_STRING ); }
+ STRING
+ { yymode( SCAN_NORMAL ); }
+ `}`
+ { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); }
+ ;
+
+/*
+ * assign - = or +=
+ */
+
+assign : `=`
+ { $$.number = ASSIGN_SET; }
+ | `+=`
+ { $$.number = ASSIGN_APPEND; }
+ | `?=`
+ { $$.number = ASSIGN_DEFAULT; }
+ | `default` `=`
+ { $$.number = ASSIGN_DEFAULT; }
+ ;
+
+/*
+ * expr - an expression for if
+ */
+expr : arg
+ { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); }
+ | expr `=` expr
+ { $$.parse = peval( EXPR_EQUALS, $1.parse, $3.parse ); }
+ | expr `!=` expr
+ { $$.parse = peval( EXPR_NOTEQ, $1.parse, $3.parse ); }
+ | expr `<` expr
+ { $$.parse = peval( EXPR_LESS, $1.parse, $3.parse ); }
+ | expr `<=` expr
+ { $$.parse = peval( EXPR_LESSEQ, $1.parse, $3.parse ); }
+ | expr `>` expr
+ { $$.parse = peval( EXPR_MORE, $1.parse, $3.parse ); }
+ | expr `>=` expr
+ { $$.parse = peval( EXPR_MOREEQ, $1.parse, $3.parse ); }
+ | expr `&` expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
+ | expr `&&` expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $3.parse ); }
+ | expr `|` expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
+ | expr `||` expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $3.parse ); }
+ | arg `in` list
+ { $$.parse = peval( EXPR_IN, $1.parse, $3.parse ); }
+ | `!` expr
+ { $$.parse = peval( EXPR_NOT, $2.parse, pnull() ); }
+ | `(` expr `)`
+ { $$.parse = $2.parse; }
+ ;
+
+
+/*
+ * cases - action elements inside a 'switch'
+ * case - a single action element inside a 'switch'
+ * right-recursive rule so cases can be examined in order.
+ */
+
+cases : /* empty */
+ { $$.parse = P0; }
+ | case cases
+ { $$.parse = pnode( $1.parse, $2.parse ); }
+ ;
+
+case : `case` ARG `:` block
+ { $$.parse = psnode( $2.string, $4.parse ); }
+ ;
+
+/*
+ * lol - list of lists
+ * right-recursive rule so that lists can be added in order.
+ */
+
+lol : list
+ { $$.parse = pnode( P0, $1.parse ); }
+ | list `:` lol
+ { $$.parse = pnode( $3.parse, $1.parse ); }
+ ;
+
+/*
+ * list - zero or more args in a LIST
+ * listp - list (in punctuation only mode)
+ * arg - one ARG or function call
+ */
+
+list : listp
+ { $$.parse = $1.parse; yymode( SCAN_NORMAL ); }
+ ;
+
+listp : /* empty */
+ { $$.parse = pnull(); yymode( SCAN_PUNCT ); }
+ | listp arg
+ { $$.parse = pappend( $1.parse, $2.parse ); }
+ ;
+
+arg : ARG
+ { $$.parse = plist( $1.string ); }
+ | `[` { yymode( SCAN_NORMAL ); } func `]`
+ { $$.parse = $3.parse; }
+ ;
+
+/*
+ * func - a function call (inside [])
+ * This needs to be split cleanly out of 'rule'
+ */
+
+func : arg lol
+ { $$.parse = prule( $1.string, $2.parse ); }
+ | `on` arg arg lol
+ { $$.parse = pon( $2.parse, prule( $3.string, $4.parse ) ); }
+ | `on` arg `return` list
+ { $$.parse = pon( $2.parse, $4.parse ); }
+ ;
+
+
+/*
+ * eflags - zero or more modifiers to 'executes'
+ * eflag - a single modifier to 'executes'
+ */
+
+eflags : /* empty */
+ { $$.number = 0; }
+ | eflags eflag
+ { $$.number = $1.number | $2.number; }
+ ;
+
+eflag : `updated`
+ { $$.number = EXEC_UPDATED; }
+ | `together`
+ { $$.number = EXEC_TOGETHER; }
+ | `ignore`
+ { $$.number = EXEC_IGNORE; }
+ | `quietly`
+ { $$.number = EXEC_QUIETLY; }
+ | `piecemeal`
+ { $$.number = EXEC_PIECEMEAL; }
+ | `existing`
+ { $$.number = EXEC_EXISTING; }
+ ;
+
+
+/*
+ * bindlist - list of variables to bind for an action
+ */
+
+bindlist : /* empty */
+ { $$.parse = pnull(); }
+ | `bind` list
+ { $$.parse = $2.parse; }
+ ;
+
+
diff --git a/jam-files/engine/jamgramtab.h b/jam-files/engine/jamgramtab.h
new file mode 100644
index 000000000..a0fd43f6a
--- /dev/null
+++ b/jam-files/engine/jamgramtab.h
@@ -0,0 +1,44 @@
+ { "!", _BANG_t },
+ { "!=", _BANG_EQUALS_t },
+ { "&", _AMPER_t },
+ { "&&", _AMPERAMPER_t },
+ { "(", _LPAREN_t },
+ { ")", _RPAREN_t },
+ { "+=", _PLUS_EQUALS_t },
+ { ":", _COLON_t },
+ { ";", _SEMIC_t },
+ { "<", _LANGLE_t },
+ { "<=", _LANGLE_EQUALS_t },
+ { "=", _EQUALS_t },
+ { ">", _RANGLE_t },
+ { ">=", _RANGLE_EQUALS_t },
+ { "?=", _QUESTION_EQUALS_t },
+ { "[", _LBRACKET_t },
+ { "]", _RBRACKET_t },
+ { "actions", ACTIONS_t },
+ { "bind", BIND_t },
+ { "case", CASE_t },
+ { "class", CLASS_t },
+ { "default", DEFAULT_t },
+ { "else", ELSE_t },
+ { "existing", EXISTING_t },
+ { "for", FOR_t },
+ { "if", IF_t },
+ { "ignore", IGNORE_t },
+ { "in", IN_t },
+ { "include", INCLUDE_t },
+ { "local", LOCAL_t },
+ { "module", MODULE_t },
+ { "on", ON_t },
+ { "piecemeal", PIECEMEAL_t },
+ { "quietly", QUIETLY_t },
+ { "return", RETURN_t },
+ { "rule", RULE_t },
+ { "switch", SWITCH_t },
+ { "together", TOGETHER_t },
+ { "updated", UPDATED_t },
+ { "while", WHILE_t },
+ { "{", _LBRACE_t },
+ { "|", _BAR_t },
+ { "||", _BARBAR_t },
+ { "}", _RBRACE_t },
diff --git a/jam-files/engine/lists.c b/jam-files/engine/lists.c
new file mode 100644
index 000000000..ebabb63e9
--- /dev/null
+++ b/jam-files/engine/lists.c
@@ -0,0 +1,339 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "newstr.h"
+# include "lists.h"
+
+/*
+ * lists.c - maintain lists of strings
+ *
+ * This implementation essentially uses a singly linked list, but
+ * guarantees that the head element of every list has a valid pointer
+ * to the tail of the list, so the new elements can efficiently and
+ * properly be appended to the end of a list.
+ *
+ * To avoid massive allocation, list_free() just tacks the whole freed
+ * chain onto freelist and list_new() looks on freelist first for an
+ * available list struct. list_free() does not free the strings in the
+ * chain: it lazily lets list_new() do so.
+ *
+ * 08/23/94 (seiwald) - new list_append()
+ * 09/07/00 (seiwald) - documented lol_*() functions
+ */
+
+static LIST *freelist = 0; /* junkpile for list_free() */
+
+/*
+ * list_append() - append a list onto another one, returning total
+ */
+
+LIST * list_append( LIST * l, LIST * nl )
+{
+ if ( !nl )
+ {
+ /* Just return l */
+ }
+ else if ( !l )
+ {
+ l = nl;
+ }
+ else
+ {
+ /* Graft two non-empty lists. */
+ l->tail->next = nl;
+ l->tail = nl->tail;
+ }
+
+ return l;
+}
+
+/*
+ * list_new() - tack a string onto the end of a list of strings
+ */
+
+LIST * list_new( LIST * head, char * string )
+{
+ LIST * l;
+
+ if ( DEBUG_LISTS )
+ printf( "list > %s <\n", string );
+
+ /* Get list struct from freelist, if one available. */
+ /* Otherwise allocate. */
+ /* If from freelist, must free string first */
+
+ if ( freelist )
+ {
+ l = freelist;
+ freestr( l->string );
+ freelist = freelist->next;
+ }
+ else
+ {
+ l = (LIST *)BJAM_MALLOC( sizeof( LIST ) );
+ }
+
+ /* If first on chain, head points here. */
+ /* If adding to chain, tack us on. */
+ /* Tail must point to this new, last element. */
+
+ if ( !head ) head = l;
+ else head->tail->next = l;
+ head->tail = l;
+ l->next = 0;
+
+ l->string = string;
+
+ return head;
+}
+
+
+/*
+ * list_copy() - copy a whole list of strings (nl) onto end of another (l).
+ */
+
+LIST * list_copy( LIST * l, LIST * nl )
+{
+ for ( ; nl; nl = list_next( nl ) )
+ l = list_new( l, copystr( nl->string ) );
+ return l;
+}
+
+
+/*
+ * list_sublist() - copy a subset of a list of strings.
+ */
+
+LIST * list_sublist( LIST * l, int start, int count )
+{
+ LIST * nl = 0;
+ for ( ; l && start--; l = list_next( l ) );
+ for ( ; l && count--; l = list_next( l ) )
+ nl = list_new( nl, copystr( l->string ) );
+ return nl;
+}
+
+
+static int str_ptr_compare( void const * va, void const * vb )
+{
+ char * a = *( (char * *)va );
+ char * b = *( (char * *)vb );
+ return strcmp(a, b);
+}
+
+
+LIST * list_sort( LIST * l )
+{
+ int len;
+ int ii;
+ char * * strings;
+ LIST * listp;
+ LIST * result = 0;
+
+ if ( !l )
+ return L0;
+
+ len = list_length( l );
+ strings = (char * *)BJAM_MALLOC( len * sizeof(char*) );
+
+ listp = l;
+ for ( ii = 0; ii < len; ++ii )
+ {
+ strings[ ii ] = listp->string;
+ listp = listp->next;
+ }
+
+ qsort( strings, len, sizeof( char * ), str_ptr_compare );
+
+ for ( ii = 0; ii < len; ++ii )
+ result = list_append( result, list_new( 0, strings[ ii ] ) );
+
+ BJAM_FREE( strings );
+
+ return result;
+}
+
+
+/*
+ * list_free() - free a list of strings
+ */
+
+void list_free( LIST * head )
+{
+ /* Just tack onto freelist. */
+ if ( head )
+ {
+ head->tail->next = freelist;
+ freelist = head;
+ }
+}
+
+
+/*
+ * list_pop_front() - remove the front element from a list of strings
+ */
+
+LIST * list_pop_front( LIST * l )
+{
+ LIST * result = l->next;
+ if ( result )
+ {
+ result->tail = l->tail;
+ l->next = L0;
+ l->tail = l;
+ }
+ list_free( l );
+ return result;
+}
+
+
+/*
+ * list_print() - print a list of strings to stdout
+ */
+
+void list_print( LIST * l )
+{
+ LIST * p = 0;
+ for ( ; l; p = l, l = list_next( l ) )
+ if ( p )
+ printf( "%s ", p->string );
+ if ( p )
+ printf( "%s", p->string );
+}
+
+
+/*
+ * list_length() - return the number of items in the list
+ */
+
+int list_length( LIST * l )
+{
+ int n = 0;
+ for ( ; l; l = list_next( l ), ++n );
+ return n;
+}
+
+
+int list_in( LIST * l, char * value )
+{
+ for ( ; l; l = l->next )
+ if ( strcmp( l->string, value ) == 0 )
+ return 1;
+ return 0;
+}
+
+
+LIST * list_unique( LIST * sorted_list )
+{
+ LIST * result = 0;
+ LIST * last_added = 0;
+
+ for ( ; sorted_list; sorted_list = sorted_list->next )
+ {
+ if ( !last_added || strcmp( sorted_list->string, last_added->string ) != 0 )
+ {
+ result = list_new( result, sorted_list->string );
+ last_added = sorted_list;
+ }
+ }
+ return result;
+}
+
+
+/*
+ * lol_init() - initialize a LOL (list of lists).
+ */
+
+void lol_init( LOL * lol )
+{
+ lol->count = 0;
+}
+
+
+/*
+ * lol_add() - append a LIST onto an LOL.
+ */
+
+void lol_add( LOL * lol, LIST * l )
+{
+ if ( lol->count < LOL_MAX )
+ lol->list[ lol->count++ ] = l;
+}
+
+
+/*
+ * lol_free() - free the LOL and its LISTs.
+ */
+
+void lol_free( LOL * lol )
+{
+ int i;
+ for ( i = 0; i < lol->count; ++i )
+ list_free( lol->list[ i ] );
+ lol->count = 0;
+}
+
+
+/*
+ * lol_get() - return one of the LISTs in the LOL.
+ */
+
+LIST * lol_get( LOL * lol, int i )
+{
+ return i < lol->count ? lol->list[ i ] : 0;
+}
+
+
+/*
+ * lol_print() - debug print LISTS separated by ":".
+ */
+
+void lol_print( LOL * lol )
+{
+ int i;
+
+ for ( i = 0; i < lol->count; ++i )
+ {
+ if ( i )
+ printf( " : " );
+ list_print( lol->list[ i ] );
+ }
+}
+
+#ifdef HAVE_PYTHON
+
+PyObject *list_to_python(LIST *l)
+{
+ PyObject *result = PyList_New(0);
+
+ for (; l; l = l->next)
+ {
+ PyObject* s = PyString_FromString(l->string);
+ PyList_Append(result, s);
+ Py_DECREF(s);
+ }
+
+ return result;
+}
+
+LIST *list_from_python(PyObject *l)
+{
+ LIST * result = 0;
+
+ Py_ssize_t i, n;
+ n = PySequence_Size(l);
+ for (i = 0; i < n; ++i)
+ {
+ PyObject *v = PySequence_GetItem(l, i);
+ result = list_new (result, newstr (PyString_AsString(v)));
+ Py_DECREF(v);
+ }
+
+ return result;
+}
+
+#endif
diff --git a/jam-files/engine/lists.h b/jam-files/engine/lists.h
new file mode 100644
index 000000000..1dc598274
--- /dev/null
+++ b/jam-files/engine/lists.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * lists.h - the LIST structure and routines to manipulate them
+ *
+ * The whole of jam relies on lists of strings as a datatype. This
+ * module, in conjunction with newstr.c, handles these relatively
+ * efficiently.
+ *
+ * Structures defined:
+ *
+ * LIST - list of strings
+ * LOL - list of LISTs
+ *
+ * External routines:
+ *
+ * list_append() - append a list onto another one, returning total
+ * list_new() - tack a string onto the end of a list of strings
+ * list_copy() - copy a whole list of strings
+ * list_sublist() - copy a subset of a list of strings
+ * list_free() - free a list of strings
+ * list_print() - print a list of strings to stdout
+ * list_length() - return the number of items in the list
+ *
+ * lol_init() - initialize a LOL (list of lists)
+ * lol_add() - append a LIST onto an LOL
+ * lol_free() - free the LOL and its LISTs
+ * lol_get() - return one of the LISTs in the LOL
+ * lol_print() - debug print LISTS separated by ":"
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 08/23/94 (seiwald) - new list_append()
+ */
+
+#ifndef LISTS_DWA20011022_H
+# define LISTS_DWA20011022_H
+
+#ifdef HAVE_PYTHON
+#include <Python.h>
+#endif
+
+/*
+ * LIST - list of strings
+ */
+
+typedef struct _list LIST;
+
+struct _list {
+ LIST *next;
+ LIST *tail; /* only valid in head node */
+ char *string; /* private copy */
+};
+
+/*
+ * LOL - list of LISTs
+ */
+
+typedef struct _lol LOL;
+
+# define LOL_MAX 19
+
+struct _lol {
+ int count;
+ LIST *list[ LOL_MAX ];
+};
+
+LIST * list_append( LIST *l, LIST *nl );
+LIST * list_copy( LIST *l, LIST *nl );
+void list_free( LIST *head );
+LIST * list_new( LIST *head, char *string );
+void list_print( LIST *l );
+int list_length( LIST *l );
+LIST * list_sublist( LIST *l, int start, int count );
+LIST * list_pop_front( LIST *l );
+LIST * list_sort( LIST *l);
+LIST * list_unique( LIST *sorted_list);
+int list_in(LIST* l, char* value);
+
+# define list_next( l ) ((l)->next)
+
+# define L0 ((LIST *)0)
+
+void lol_add( LOL *lol, LIST *l );
+void lol_init( LOL *lol );
+void lol_free( LOL *lol );
+LIST * lol_get( LOL *lol, int i );
+void lol_print( LOL *lol );
+void lol_build( LOL* lol, char** elements );
+
+#ifdef HAVE_PYTHON
+
+PyObject *list_to_python(LIST *l);
+LIST *list_from_python(PyObject *l);
+
+#endif
+
+#endif
+
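Taken together, lists.c and lists.h define the string-list type the rest of the engine is built on. A minimal usage sketch follows; it is hypothetical and assumes it is compiled and linked with the rest of the jam engine (printf comes in via jam.h, newstr() from newstr.c, and list_new() takes ownership of the interned string it is given):

    #include "jam.h"
    #include "lists.h"
    #include "newstr.h"

    /* Build, print and free a two-element list, then wrap it in a LOL. */
    static void list_demo( void )
    {
        LIST * l = L0;
        LOL    args;

        l = list_new( l, newstr( "a.c" ) );
        l = list_new( l, newstr( "b.c" ) );

        printf( "%d item(s): ", list_length( l ) );  /* prints "2 item(s): " */
        list_print( l );                             /* prints "a.c b.c"     */
        printf( "\n" );

        lol_init( &args );
        lol_add( &args, l );      /* the LIST becomes slot 0 of the LOL   */
        lol_print( &args );
        printf( "\n" );

        lol_free( &args );        /* list_free()s the contained LIST too  */
    }

Because list_free() only parks nodes on the internal freelist, repeated build/free cycles like this one reuse the same LIST structs instead of going back to the allocator each time.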
diff --git a/jam-files/engine/make.c b/jam-files/engine/make.c
new file mode 100644
index 000000000..c871f0be2
--- /dev/null
+++ b/jam-files/engine/make.c
@@ -0,0 +1,814 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * make.c - bring a target up to date, once rules are in place.
+ *
+ * This module controls the execution of rules to bring a target and its
+ * dependencies up to date. It is invoked after the targets, rules, and so on
+ * described in rules.h have been created by interpreting the jam files.
+ *
+ * This file contains the main make() entry point and the first pass make0().
+ * The second pass, make1(), which actually does the command execution, is in
+ * make1.c.
+ *
+ * External routines:
+ * make() - make a target, given its name
+ *
+ * Internal routines:
+ * make0() - bind and scan everything to make a TARGET
+ * make0sort() - reorder TARGETS chain by their time (newest to oldest)
+ *
+ * 12/26/93 (seiwald) - allow NOTIME targets to be expanded via $(<), $(>).
+ * 01/04/94 (seiwald) - print all targets, bounded, when tracing commands.
+ * 04/08/94 (seiwald) - progress report now reflects only targets with actions.
+ * 04/11/94 (seiwald) - Combined deps & headers into deps[2] in TARGET.
+ * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
+ * 12/20/94 (seiwald) - make0() headers after determining fate of target, so
+ * that headers are not seen as being dependent on
+ * themselves.
+ * 01/19/95 (seiwald) - distinguish between CANTFIND/CANTMAKE targets.
+ * 02/02/95 (seiwald) - propagate leaf source time for new LEAVES rule.
+ * 02/14/95 (seiwald) - NOUPDATE rule means don't update existing target.
+ * 08/22/95 (seiwald) - NOUPDATE targets immune to anyhow (-a) flag.
+ * 09/06/00 (seiwald) - NOCARE affects targets with sources/actions.
+ * 03/02/01 (seiwald) - reverse NOCARE change.
+ * 03/14/02 (seiwald) - TEMPORARY targets no longer take on parents age.
+ * 03/16/02 (seiwald) - support for -g (reorder builds by source time).
+ */
+
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "variable.h"
+#include "rules.h"
+
+#ifdef OPT_HEADER_CACHE_EXT
+ #include "hcache.h"
+#endif
+
+#include "search.h"
+#include "newstr.h"
+#include "make.h"
+#include "headers.h"
+#include "command.h"
+#include <assert.h>
+
+#ifndef max
+ #define max( a,b ) ((a)>(b)?(a):(b))
+#endif
+
+static TARGETS * make0sort( TARGETS * c );
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ static void dependGraphOutput( TARGET * t, int depth );
+#endif
+
+static const char * target_fate[] =
+{
+ "init", /* T_FATE_INIT */
+ "making", /* T_FATE_MAKING */
+ "stable", /* T_FATE_STABLE */
+ "newer", /* T_FATE_NEWER */
+ "temp", /* T_FATE_ISTMP */
+ "touched", /* T_FATE_TOUCHED */
+ "rebuild", /* T_FATE_REBUILD */
+ "missing", /* T_FATE_MISSING */
+ "needtmp", /* T_FATE_NEEDTMP */
+ "old", /* T_FATE_OUTDATED */
+ "update", /* T_FATE_UPDATE */
+ "nofind", /* T_FATE_CANTFIND */
+ "nomake" /* T_FATE_CANTMAKE */
+};
+
+static const char * target_bind[] =
+{
+ "unbound",
+ "missing",
+ "parents",
+ "exists",
+};
+
+# define spaces(x) ( "                    " + ( x > 20 ? 0 : 20-x ) )
+
+
+/*
+ * make() - make a target, given its name.
+ */
+
+int make( int n_targets, char const * * targets, int anyhow )
+{
+ int i;
+ COUNTS counts[ 1 ];
+ int status = 0; /* 1 if anything fails */
+
+#ifdef OPT_HEADER_CACHE_EXT
+ hcache_init();
+#endif
+
+ memset( (char *)counts, 0, sizeof( *counts ) );
+
+ /* First bind all targets with LOCATE_TARGET setting. This is needed to
+ * correctly handle dependencies on generated headers.
+ */
+ bind_explicitly_located_targets();
+
+ {
+ PROFILE_ENTER( MAKE_MAKE0 );
+ for ( i = 0; i < n_targets; ++i )
+ make0( bindtarget( targets[ i ] ), 0, 0, counts, anyhow );
+ PROFILE_EXIT( MAKE_MAKE0 );
+ }
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_GRAPH )
+ for ( i = 0; i < n_targets; ++i )
+ dependGraphOutput( bindtarget( targets[ i ] ), 0 );
+#endif
+
+ if ( DEBUG_MAKE )
+ {
+ if ( counts->targets )
+ printf( "...found %d target%s...\n", counts->targets,
+ counts->targets > 1 ? "s" : "" );
+ if ( counts->temp )
+ printf( "...using %d temp target%s...\n", counts->temp,
+ counts->temp > 1 ? "s" : "" );
+ if ( counts->updating )
+ printf( "...updating %d target%s...\n", counts->updating,
+ counts->updating > 1 ? "s" : "" );
+ if ( counts->cantfind )
+ printf( "...can't find %d target%s...\n", counts->cantfind,
+ counts->cantfind > 1 ? "s" : "" );
+ if ( counts->cantmake )
+ printf( "...can't make %d target%s...\n", counts->cantmake,
+ counts->cantmake > 1 ? "s" : "" );
+ }
+
+#ifdef OPT_HEADER_CACHE_EXT
+ hcache_done();
+#endif
+
+ status = counts->cantfind || counts->cantmake;
+
+ {
+ PROFILE_ENTER( MAKE_MAKE1 );
+ for ( i = 0; i < n_targets; ++i )
+ status |= make1( bindtarget( targets[ i ] ) );
+ PROFILE_EXIT( MAKE_MAKE1 );
+ }
+
+ return status;
+}
+
+
+/* Force any dependants of t that have already at least begun being visited by
+ * make0() to be updated.
+ */
+
+static void update_dependants( TARGET * t )
+{
+ TARGETS * q;
+
+ for ( q = t->dependants; q; q = q->next )
+ {
+ TARGET * p = q->target;
+ char fate0 = p->fate;
+
+ /* If we have already at least begun visiting it and we are not already
+ * rebuilding it for other reasons.
+ */
+ if ( ( fate0 != T_FATE_INIT ) && ( fate0 < T_FATE_BUILD ) )
+ {
+ p->fate = T_FATE_UPDATE;
+
+ if ( DEBUG_FATE )
+ {
+ printf( "fate change %s from %s to %s (as dependant of %s)\n",
+ p->name, target_fate[ (int) fate0 ], target_fate[ (int) p->fate ], t->name );
+ }
+
+ /* If we are done visiting it, go back and make sure its dependants
+ * get rebuilt.
+ */
+ if ( fate0 > T_FATE_MAKING )
+ update_dependants( p );
+ }
+ }
+}
+
+
+/*
+ * Make sure that all of t's rebuilds get rebuilt.
+ */
+
+static void force_rebuilds( TARGET * t )
+{
+ TARGETS * d;
+ for ( d = t->rebuilds; d; d = d->next )
+ {
+ TARGET * r = d->target;
+
+ /* If it is not already being rebuilt for other reasons. */
+ if ( r->fate < T_FATE_BUILD )
+ {
+ if ( DEBUG_FATE )
+ printf( "fate change %s from %s to %s (by rebuild)\n",
+ r->name, target_fate[ (int) r->fate ], target_fate[ T_FATE_REBUILD ] );
+
+ /* Force rebuild it. */
+ r->fate = T_FATE_REBUILD;
+
+ /* And make sure its dependants are updated too. */
+ update_dependants( r );
+ }
+ }
+}
+
+
+/*
+ * make0() - bind and scan everything to make a TARGET.
+ *
+ * Recursively binds a target, searches for #included headers, calls itself on
+ * those headers and any dependencies.
+ */
+
+void make0
+(
+ TARGET * t,
+ TARGET * p, /* parent */
+ int depth, /* for display purposes */
+ COUNTS * counts, /* for reporting */
+ int anyhow
+) /* forcibly touch all (real) targets */
+{
+ TARGETS * c;
+ TARGET * ptime = t;
+ time_t last;
+ time_t leaf;
+ time_t hlast;
+ int fate;
+ char const * flag = "";
+ SETTINGS * s;
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ int savedFate, oldTimeStamp;
+#endif
+
+ if ( DEBUG_MAKEPROG )
+ printf( "make\t--\t%s%s\n", spaces( depth ), t->name );
+
+ /*
+ * Step 1: initialize
+ */
+
+ if ( DEBUG_MAKEPROG )
+ printf( "make\t--\t%s%s\n", spaces( depth ), t->name );
+
+ t->fate = T_FATE_MAKING;
+
+ /*
+ * Step 2: under the influence of "on target" variables,
+ * bind the target and search for headers.
+ */
+
+ /* Step 2a: set "on target" variables. */
+ s = copysettings( t->settings );
+ pushsettings( s );
+
+ /* Step 2b: find and timestamp the target file (if it is a file). */
+ if ( ( t->binding == T_BIND_UNBOUND ) && !( t->flags & T_FLAG_NOTFILE ) )
+ {
+ char * another_target;
+ t->boundname = search( t->name, &t->time, &another_target,
+ t->flags & T_FLAG_ISFILE );
+ /* If it was detected that this target refers to an already existing and
+ * bound one, we add an include dependency, so that every target
+ * depending on us will depend on that other target as well.
+ */
+ if ( another_target )
+ target_include( t, bindtarget( another_target ) );
+
+ t->binding = t->time ? T_BIND_EXISTS : T_BIND_MISSING;
+ }
+
+ /* INTERNAL, NOTFILE header nodes have the time of their parents. */
+ if ( p && ( t->flags & T_FLAG_INTERNAL ) )
+ ptime = p;
+
+ /* If temp file does not exist but parent does, use parent. */
+ if ( p && ( t->flags & T_FLAG_TEMP ) &&
+ ( t->binding == T_BIND_MISSING ) &&
+ ( p->binding != T_BIND_MISSING ) )
+ {
+ t->binding = T_BIND_PARENTS;
+ ptime = p;
+ }
+
+#ifdef OPT_SEMAPHORE
+ {
+ LIST * var = var_get( "JAM_SEMAPHORE" );
+ if ( var )
+ {
+ TARGET * semaphore = bindtarget( var->string );
+ semaphore->progress = T_MAKE_SEMAPHORE;
+ t->semaphore = semaphore;
+ }
+ }
+#endif
+
+ /* Step 2c: If its a file, search for headers. */
+ if ( t->binding == T_BIND_EXISTS )
+ headers( t );
+
+ /* Step 2d: reset "on target" variables. */
+ popsettings( s );
+ freesettings( s );
+
+ /*
+ * Pause for a little progress reporting.
+ */
+
+ if ( DEBUG_BIND )
+ {
+ if ( strcmp( t->name, t->boundname ) )
+ printf( "bind\t--\t%s%s: %s\n",
+ spaces( depth ), t->name, t->boundname );
+
+ switch ( t->binding )
+ {
+ case T_BIND_UNBOUND:
+ case T_BIND_MISSING:
+ case T_BIND_PARENTS:
+ printf( "time\t--\t%s%s: %s\n",
+ spaces( depth ), t->name, target_bind[ (int) t->binding ] );
+ break;
+
+ case T_BIND_EXISTS:
+ printf( "time\t--\t%s%s: %s",
+ spaces( depth ), t->name, ctime( &t->time ) );
+ break;
+ }
+ }
+
+ /*
+ * Step 3: recursively make0() dependencies & headers.
+ */
+
+ /* Step 3a: recursively make0() dependencies. */
+ for ( c = t->depends; c; c = c->next )
+ {
+ int internal = t->flags & T_FLAG_INTERNAL;
+
+ /* Warn about circular deps, except for includes, which include each
+ * other a lot.
+ */
+ if ( c->target->fate == T_FATE_INIT )
+ make0( c->target, ptime, depth + 1, counts, anyhow );
+ else if ( c->target->fate == T_FATE_MAKING && !internal )
+ printf( "warning: %s depends on itself\n", c->target->name );
+ }
+
+ /* Step 3b: recursively make0() internal includes node. */
+ if ( t->includes )
+ make0( t->includes, p, depth + 1, counts, anyhow );
+
+ /* Step 3c: add dependencies' includes to our direct dependencies. */
+ {
+ TARGETS * incs = 0;
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->includes )
+ incs = targetentry( incs, c->target->includes );
+ t->depends = targetchain( t->depends, incs );
+ }
+
+ /*
+ * Step 4: compute time & fate
+ */
+
+ /* Step 4a: pick up dependencies' time and fate */
+ last = 0;
+ leaf = 0;
+ fate = T_FATE_STABLE;
+ for ( c = t->depends; c; c = c->next )
+ {
+ /* If LEAVES has been applied, we only heed the timestamps of the leaf
+ * source nodes.
+ */
+ leaf = max( leaf, c->target->leaf );
+
+ if ( t->flags & T_FLAG_LEAVES )
+ {
+ last = leaf;
+ continue;
+ }
+
+ last = max( last, c->target->time );
+ fate = max( fate, c->target->fate );
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ if ( fate < c->target->fate )
+ printf( "fate change %s from %s to %s by dependency %s\n",
+ t->name, target_fate[(int) fate], target_fate[(int) c->target->fate],
+ c->target->name );
+#endif
+ }
+
+ /* Step 4b: pick up included headers time */
+
+ /*
+ * If a header is newer than a temp source that includes it,
+ * the temp source will need building.
+ */
+
+ hlast = t->includes ? t->includes->time : 0;
+
+ /* Step 4c: handle NOUPDATE oddity.
+ *
+ * If a NOUPDATE file exists, mark it as having eternally old dependencies.
+ * Do not inherit our fate from our dependencies. Decide fate based only on
+ * other flags and our binding (done later).
+ */
+ if ( t->flags & T_FLAG_NOUPDATE )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ if ( fate != T_FATE_STABLE )
+ printf( "fate change %s back to stable, NOUPDATE.\n", t->name
+ );
+#endif
+
+ last = 0;
+ t->time = 0;
+
+ /* Do not inherit our fate from our dependencies. Decide fate based only
+ * upon other flags and our binding (done later).
+ */
+ fate = T_FATE_STABLE;
+ }
+
+ /* Step 4d: determine fate: rebuild target or what? */
+
+ /*
+ In English:
+ If can not find or make child, can not make target.
+ If children changed, make target.
+ If target missing, make it.
+ If children newer, make target.
+ If temp's children newer than parent, make temp.
+ If temp's headers newer than parent, make temp.
+ If deliberately touched, make it.
+ If up-to-date temp file present, use it.
+ If target newer than non-notfile parent, mark target newer.
+ Otherwise, stable!
+
+ Note this block runs from least to most stable:
+ as we make it further down the list, the target's
+ fate is getting stabler.
+ */
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ savedFate = fate;
+ oldTimeStamp = 0;
+#endif
+
+ if ( fate >= T_FATE_BROKEN )
+ {
+ fate = T_FATE_CANTMAKE;
+ }
+ else if ( fate >= T_FATE_SPOIL )
+ {
+ fate = T_FATE_UPDATE;
+ }
+ else if ( t->binding == T_BIND_MISSING )
+ {
+ fate = T_FATE_MISSING;
+ }
+ else if ( ( t->binding == T_BIND_EXISTS ) && ( last > t->time ) )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_OUTDATED;
+ }
+ else if ( ( t->binding == T_BIND_PARENTS ) && ( last > p->time ) )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_NEEDTMP;
+ }
+ else if ( ( t->binding == T_BIND_PARENTS ) && ( hlast > p->time ) )
+ {
+ fate = T_FATE_NEEDTMP;
+ }
+ else if ( t->flags & T_FLAG_TOUCHED )
+ {
+ fate = T_FATE_TOUCHED;
+ }
+ else if ( anyhow && !( t->flags & T_FLAG_NOUPDATE ) )
+ {
+ fate = T_FATE_TOUCHED;
+ }
+ else if ( ( t->binding == T_BIND_EXISTS ) && ( t->flags & T_FLAG_TEMP ) )
+ {
+ fate = T_FATE_ISTMP;
+ }
+ else if ( ( t->binding == T_BIND_EXISTS ) && p &&
+ ( p->binding != T_BIND_UNBOUND ) && ( t->time > p->time ) )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_NEWER;
+ }
+ else
+ {
+ fate = T_FATE_STABLE;
+ }
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE && ( fate != savedFate ) )
+ {
+ if ( savedFate == T_FATE_STABLE )
+ printf( "fate change %s set to %s%s\n", t->name,
+ target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" );
+ else
+ printf( "fate change %s from %s to %s%s\n", t->name,
+ target_fate[ savedFate ], target_fate[ fate ],
+ oldTimeStamp ? " (by timestamp)" : "" );
+ }
+#endif
+
+ /* Step 4e: handle missing files */
+ /* If it is missing and there are no actions to create it, boom. */
+ /* If we can not make a target we do not care about it, okay. */
+ /* We could insist that there are updating actions for all missing */
+ /* files, but if they have dependencies we just pretend it is a NOTFILE. */
+
+ if ( ( fate == T_FATE_MISSING ) && !t->actions && !t->depends )
+ {
+ if ( t->flags & T_FLAG_NOCARE )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ printf( "fate change %s to STABLE from %s, "
+ "no actions, no dependencies and do not care\n",
+ t->name, target_fate[ fate ] );
+#endif
+ fate = T_FATE_STABLE;
+ }
+ else
+ {
+ printf( "don't know how to make %s\n", t->name );
+ fate = T_FATE_CANTFIND;
+ }
+ }
+
+ /* Step 4f: propagate dependencies' time & fate. */
+ /* Set leaf time to be our time only if this is a leaf. */
+
+ t->time = max( t->time, last );
+ t->leaf = leaf ? leaf : t->time ;
+ /* This target's fate may have been updated by virtue of following some
+ * target's rebuilds list, so only allow it to be increased to the fate we
+ * have calculated. Otherwise, grab its new fate.
+ */
+ if ( fate > t->fate )
+ t->fate = fate;
+ else
+ fate = t->fate;
+
+ /* Step 4g: if this target needs to be built, force rebuild everything in
+ * this target's rebuilds list.
+ */
+ if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
+ force_rebuilds( t );
+
+ /*
+ * Step 5: sort dependencies by their update time.
+ */
+
+ if ( globs.newestfirst )
+ t->depends = make0sort( t->depends );
+
+ /*
+ * Step 6: a little harmless tabulating for tracing purposes
+ */
+
+ /* Do not count or report internal includes nodes. */
+ if ( t->flags & T_FLAG_INTERNAL )
+ return;
+
+ if ( counts )
+ {
+#ifdef OPT_IMPROVED_PATIENCE_EXT
+ ++counts->targets;
+#else
+ if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE )
+ printf( "...patience...\n" );
+#endif
+
+ if ( fate == T_FATE_ISTMP )
+ ++counts->temp;
+ else if ( fate == T_FATE_CANTFIND )
+ ++counts->cantfind;
+ else if ( ( fate == T_FATE_CANTMAKE ) && t->actions )
+ ++counts->cantmake;
+ else if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) &&
+ t->actions )
+ ++counts->updating;
+ }
+
+ if ( !( t->flags & T_FLAG_NOTFILE ) && ( fate >= T_FATE_SPOIL ) )
+ flag = "+";
+ else if ( ( t->binding == T_BIND_EXISTS ) && p && ( t->time > p->time ) )
+ flag = "*";
+
+ if ( DEBUG_MAKEPROG )
+ printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int) t->fate ],
+ spaces( depth ), t->name );
+}
+
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+
+static const char * target_name( TARGET * t )
+{
+ static char buf[ 1000 ];
+ if ( t->flags & T_FLAG_INTERNAL )
+ {
+ sprintf( buf, "%s (internal node)", t->name );
+ return buf;
+ }
+ return t->name;
+}
+
+
+/*
+ * dependGraphOutput() - output the DG after make0 has run.
+ */
+
+static void dependGraphOutput( TARGET * t, int depth )
+{
+ TARGETS * c;
+
+ if ( ( t->flags & T_FLAG_VISITED ) || !t->name || !t->boundname )
+ return;
+
+ t->flags |= T_FLAG_VISITED;
+
+ switch ( t->fate )
+ {
+ case T_FATE_TOUCHED:
+ case T_FATE_MISSING:
+ case T_FATE_OUTDATED:
+ case T_FATE_UPDATE:
+ printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) );
+ break;
+ default:
+ printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) );
+ break;
+ }
+
+ if ( strcmp( t->name, t->boundname ) )
+ printf( " %s Loc: %s\n", spaces( depth ), t->boundname );
+
+ switch ( t->fate )
+ {
+ case T_FATE_STABLE:
+ printf( " %s : Stable\n", spaces( depth ) );
+ break;
+ case T_FATE_NEWER:
+ printf( " %s : Newer\n", spaces( depth ) );
+ break;
+ case T_FATE_ISTMP:
+ printf( " %s : Up to date temp file\n", spaces( depth ) );
+ break;
+ case T_FATE_NEEDTMP:
+ printf( " %s : Temporary file, to be updated\n", spaces( depth ) );
+ break;
+ case T_FATE_TOUCHED:
+ printf( " %s : Been touched, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_MISSING:
+ printf( " %s : Missing, creating it\n", spaces( depth ) );
+ break;
+ case T_FATE_OUTDATED:
+ printf( " %s : Outdated, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_REBUILD:
+ printf( " %s : Rebuild, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_UPDATE:
+ printf( " %s : Updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_CANTFIND:
+ printf( " %s : Can not find it\n", spaces( depth ) );
+ break;
+ case T_FATE_CANTMAKE:
+ printf( " %s : Can make it\n", spaces( depth ) );
+ break;
+ }
+
+ if ( t->flags & ~T_FLAG_VISITED )
+ {
+ printf( " %s : ", spaces( depth ) );
+ if ( t->flags & T_FLAG_TEMP ) printf( "TEMPORARY " );
+ if ( t->flags & T_FLAG_NOCARE ) printf( "NOCARE " );
+ if ( t->flags & T_FLAG_NOTFILE ) printf( "NOTFILE " );
+ if ( t->flags & T_FLAG_TOUCHED ) printf( "TOUCHED " );
+ if ( t->flags & T_FLAG_LEAVES ) printf( "LEAVES " );
+ if ( t->flags & T_FLAG_NOUPDATE ) printf( "NOUPDATE " );
+ printf( "\n" );
+ }
+
+ for ( c = t->depends; c; c = c->next )
+ {
+ printf( " %s : Depends on %s (%s)", spaces( depth ),
+ target_name( c->target ), target_fate[ (int) c->target->fate ] );
+ if ( c->target->time == t->time )
+ printf( " (max time)");
+ printf( "\n" );
+ }
+
+ for ( c = t->depends; c; c = c->next )
+ dependGraphOutput( c->target, depth + 1 );
+}
+#endif
+
+
+/*
+ * make0sort() - reorder TARGETS chain by their time (newest to oldest).
+ *
+ * We walk chain, taking each item and inserting it on the sorted result, with
+ * newest items at the front. This involves updating each of the TARGETS'
+ * c->next and c->tail. Note that we make c->tail a valid prev pointer for every
+ * entry. Normally, it is only valid at the head, where prev == tail. Note also
+ * that while tail is a loop, next ends at the end of the chain.
+ */
+
+static TARGETS * make0sort( TARGETS * chain )
+{
+ PROFILE_ENTER( MAKE_MAKE0SORT );
+
+ TARGETS * result = 0;
+
+ /* Walk the current target list. */
+ while ( chain )
+ {
+ TARGETS * c = chain;
+ TARGETS * s = result;
+
+ chain = chain->next;
+
+ /* Find point s in result for c. */
+ while ( s && ( s->target->time > c->target->time ) )
+ s = s->next;
+
+ /* Insert c in front of s (might be 0). Do not even think of deciphering
+ * this.
+ */
+ c->next = s; /* good even if s = 0 */
+ if ( result == s ) result = c; /* new head of chain? */
+ if ( !s ) s = result; /* wrap to ensure a next */
+ if ( result != c ) s->tail->next = c; /* not head? be prev's next */
+ c->tail = s->tail; /* take on next's prev */
+ s->tail = c; /* make next's prev us */
+ }
+
+ PROFILE_EXIT( MAKE_MAKE0SORT );
+ return result;
+}
+
+
+static LIST * targets_to_update_ = 0;
+
+
+void mark_target_for_updating( char * target )
+{
+ targets_to_update_ = list_new( targets_to_update_, target );
+}
+
+
+LIST * targets_to_update()
+{
+ return targets_to_update_;
+}
+
+
+void clear_targets_to_update()
+{
+ list_free( targets_to_update_ );
+ targets_to_update_ = 0;
+}
diff --git a/jam-files/engine/make.h b/jam-files/engine/make.h
new file mode 100644
index 000000000..b372263e1
--- /dev/null
+++ b/jam-files/engine/make.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * make.h - bring a target up to date, once rules are in place
+ */
+
+#include "lists.h"
+
+int make( int n_targets, const char **targets, int anyhow );
+int make1( TARGET *t );
+
+typedef struct {
+ int temp;
+ int updating;
+ int cantfind;
+ int cantmake;
+ int targets;
+ int made;
+} COUNTS ;
+
+
+void make0( TARGET *t, TARGET *p, int depth,
+ COUNTS *counts, int anyhow );
+
+
+/*
+ * Specifies that the target should be updated.
+ */
+void mark_target_for_updating(char *target);
+/*
+ * Returns the list of all targets previously passed to 'mark_target_for_updating'.
+ */
+LIST *targets_to_update();
+/*
+ * Clears/unmarks all targets that are currently marked for update.
+ */
+void clear_targets_to_update();
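These declarations are the whole public surface of the build phase: mark_target_for_updating() records goals as they are parsed, and make() then drives the make0() analysis pass followed by the make1() execution pass. A minimal, hypothetical fragment exercising just the make() entry point (the goal names are invented; the anyhow argument corresponds to jam's -a "build anyhow" flag mentioned in make.c):

    #include "jam.h"
    #include "make.h"

    /* Bring two illustrative goals up to date; returns non-zero on failure. */
    static int build_default_goals( void )
    {
        char const * goals[] = { "all", "install" };
        return make( 2, goals, 0 /* anyhow: 0 = only rebuild what is out of date */ );
    }

make() combines the counts of unfindable/unmakeable targets with the per-goal make1() statuses, so a zero return means every requested target was either already stable or successfully updated.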
diff --git a/jam-files/engine/make1.c b/jam-files/engine/make1.c
new file mode 100644
index 000000000..8001f3339
--- /dev/null
+++ b/jam-files/engine/make1.c
@@ -0,0 +1,1145 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * make1.c - execute command to bring targets up to date
+ *
+ * This module contains make1(), the entry point called by make() to
+ * recursively descend the dependency graph executing update actions as
+ * marked by make0().
+ *
+ * External routines:
+ *
+ * make1() - execute commands to update a TARGET and all of its dependencies.
+ *
+ * Internal routines, the recursive/asynchronous command executors:
+ *
+ * make1a() - recursively traverse dependency target tree, calling make1b().
+ * make1atail() - started processing all dependencies so go on to make1b().
+ * make1b() - when dependencies are up to date, build target with make1c().
+ * make1c() - launch target's next command, call parents' make1b() if none.
+ * make1d() - handle command execution completion and call back make1c().
+ *
+ * Internal support routines:
+ *
+ * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>).
+ * make1settings() - for vars that get bound values, build up replacement lists.
+ * make1bind() - bind targets that weren't bound in dependency analysis.
+ *
+ * 04/16/94 (seiwald) - Split from make.c.
+ * 04/21/94 (seiwald) - Handle empty "updated" actions.
+ * 05/04/94 (seiwald) - async multiprocess (-j) support.
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources.
+ * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
+ * 01/19/95 (seiwald) - distinguish between CANTFIND/CANTMAKE targets.
+ * 01/22/94 (seiwald) - pass per-target JAMSHELL down to exec_cmd().
+ * 02/28/95 (seiwald) - Handle empty "existing" actions.
+ * 03/10/95 (seiwald) - Fancy counts.
+ */
+
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "assert.h"
+#include "variable.h"
+#include "rules.h"
+#include "headers.h"
+
+#include "search.h"
+#include "newstr.h"
+#include "make.h"
+#include "command.h"
+#include "execcmd.h"
+#include "compile.h"
+#include "output.h"
+
+#include <stdlib.h>
+
+#if ! defined(NT) || defined(__GNUC__)
+ #include <unistd.h> /* for unlink */
+#endif
+
+static CMD * make1cmds ( TARGET * );
+static LIST * make1list ( LIST *, TARGETS *, int flags );
+static SETTINGS * make1settings( LIST * vars );
+static void make1bind ( TARGET * );
+
+/* Ugly static - it is too hard to carry it through the callbacks. */
+
+static struct
+{
+ int failed;
+ int skipped;
+ int total;
+ int made;
+} counts[ 1 ] ;
+
+/* Target state - remove recursive calls by just keeping track of state target
+ * is in.
+ */
+typedef struct _state
+{
+ struct _state * prev; /* previous state on stack */
+ TARGET * t; /* current target */
+ TARGET * parent; /* parent argument necessary for make1a() */
+#define T_STATE_MAKE1A 0 /* make1a() should be called */
+#define T_STATE_MAKE1ATAIL 1 /* make1atail() should be called */
+#define T_STATE_MAKE1B 2 /* make1b() should be called */
+#define T_STATE_MAKE1C 3 /* make1c() should be called */
+#define T_STATE_MAKE1D 4 /* make1d() should be called */
+ int curstate; /* current state */
+ int status;
+} state;
+
+static void make1a ( state * );
+static void make1atail ( state * );
+static void make1b ( state * );
+static void make1c ( state * );
+static void make1d ( state * );
+static void make_closure( void * closure, int status, timing_info *, char *, char * );
+
+typedef struct _stack
+{
+ state * stack;
+} stack;
+
+static stack state_stack = { NULL };
+
+static state * state_freelist = NULL;
+
+
+static state * alloc_state()
+{
+ if ( state_freelist != NULL )
+ {
+ state * pState = state_freelist;
+ state_freelist = pState->prev;
+ memset( pState, 0, sizeof( state ) );
+ return pState;
+ }
+
+ return (state *)BJAM_MALLOC( sizeof( state ) );
+}
+
+
+static void free_state( state * pState )
+{
+ pState->prev = state_freelist;
+ state_freelist = pState;
+}
+
+
+static void clear_state_freelist()
+{
+ while ( state_freelist != NULL )
+ {
+ state * pState = state_freelist;
+ state_freelist = state_freelist->prev;
+ BJAM_FREE( pState );
+ }
+}
+
+
+static state * current_state( stack * pStack )
+{
+ return pStack->stack;
+}
+
+
+static void pop_state( stack * pStack )
+{
+ if ( pStack->stack != NULL )
+ {
+ state * pState = pStack->stack->prev;
+ free_state( pStack->stack );
+ pStack->stack = pState;
+ }
+}
+
+
+static state * push_state( stack * pStack, TARGET * t, TARGET * parent, int curstate )
+{
+ state * pState = alloc_state();
+
+ pState->t = t;
+ pState->parent = parent;
+ pState->prev = pStack->stack;
+ pState->curstate = curstate;
+
+ pStack->stack = pState;
+
+ return pStack->stack;
+}
+
+
+/*
+ * Pushes a stack onto another stack, effectively reversing the order.
+ */
+
+static void push_stack_on_stack( stack * pDest, stack * pSrc )
+{
+ while ( pSrc->stack != NULL )
+ {
+ state * pState = pSrc->stack;
+ pSrc->stack = pSrc->stack->prev;
+ pState->prev = pDest->stack;
+ pDest->stack = pState;
+ }
+}
+
+
+/*
+ * make1() - execute commands to update a TARGET and all of its dependencies.
+ */
+
+static int intr = 0;
+
+int make1( TARGET * t )
+{
+ state * pState;
+
+ memset( (char *)counts, 0, sizeof( *counts ) );
+
+ /* Recursively make the target and its dependencies. */
+ push_state( &state_stack, t, NULL, T_STATE_MAKE1A );
+
+ do
+ {
+ while ( ( pState = current_state( &state_stack ) ) != NULL )
+ {
+ if ( intr )
+ pop_state( &state_stack );
+
+ switch ( pState->curstate )
+ {
+ case T_STATE_MAKE1A : make1a ( pState ); break;
+ case T_STATE_MAKE1ATAIL: make1atail( pState ); break;
+ case T_STATE_MAKE1B : make1b ( pState ); break;
+ case T_STATE_MAKE1C : make1c ( pState ); break;
+ case T_STATE_MAKE1D : make1d ( pState ); break;
+ }
+ }
+ }
+ /* Wait for any outstanding commands to finish running. */
+ while ( exec_wait() );
+
+ clear_state_freelist();
+
+ /* Talk about it. */
+ if ( counts->failed )
+ printf( "...failed updating %d target%s...\n", counts->failed,
+ counts->failed > 1 ? "s" : "" );
+ if ( DEBUG_MAKE && counts->skipped )
+ printf( "...skipped %d target%s...\n", counts->skipped,
+ counts->skipped > 1 ? "s" : "" );
+ if ( DEBUG_MAKE && counts->made )
+ printf( "...updated %d target%s...\n", counts->made,
+ counts->made > 1 ? "s" : "" );
+
+ return counts->total != counts->made;
+}
+
+
+/*
+ * make1a() - recursively traverse target tree, calling make1b().
+ *
+ * Called to start processing a specified target. Does nothing if the target is
+ * already being processed; otherwise it starts processing all of the target's
+ * dependencies. Once all of those dependencies have started being processed, it
+ * goes on to call make1b() (indirectly, via the helper make1atail() state).
+ */
+
+static void make1a( state * pState )
+{
+ TARGET * t = pState->t;
+ TARGETS * c;
+
+ /* If the parent is the first to try to build this target or this target is
+ * in the make1c() quagmire, arrange for the parent to be notified when this
+ * target is built.
+ */
+ if ( pState->parent )
+ switch ( pState->t->progress )
+ {
+ case T_MAKE_INIT:
+ case T_MAKE_ACTIVE:
+ case T_MAKE_RUNNING:
+ pState->t->parents = targetentry( pState->t->parents,
+ pState->parent );
+ ++pState->parent->asynccnt;
+ }
+
+ /* If this target is already being processed then do nothing. There is no
+ * need to start processing the same target all over again.
+ */
+ if ( pState->t->progress != T_MAKE_INIT )
+ {
+ pop_state( &state_stack );
+ return;
+ }
+
+ /* Asynccnt counts the dependencies preventing this target from proceeding
+ * to make1b() for actual building. We start off with a count of 1 to
+ * prevent anything from happening until we can notify all dependencies that
+ * they are needed. This 1 is accounted for when we call make1b() ourselves,
+ * below. Without this, if a dependency gets built before we finish
+ * processing all of our other dependencies, our build might be triggered
+ * prematurely.
+ */
+ pState->t->asynccnt = 1;
+
+ /* Add header nodes created during the building process. */
+ {
+ TARGETS * inc = 0;
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->rescanned && c->target->includes )
+ inc = targetentry( inc, c->target->includes );
+ t->depends = targetchain( t->depends, inc );
+ }
+
+ /* Guard against circular dependencies. */
+ pState->t->progress = T_MAKE_ONSTACK;
+
+ {
+ stack temp_stack = { NULL };
+ for ( c = t->depends; c && !intr; c = c->next )
+ push_state( &temp_stack, c->target, pState->t, T_STATE_MAKE1A );
+
+ /* Using stacks reverses the order of execution. Reverse it back. */
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+
+ pState->curstate = T_STATE_MAKE1ATAIL;
+}
+
+
+/*
+ * make1atail() - started processing all dependencies so go on to make1b().
+ */
+
+static void make1atail( state * pState )
+{
+ pState->t->progress = T_MAKE_ACTIVE;
+ /* Now that all of our dependencies have bumped up our asynccnt we can
+ * remove our own internal bump added to prevent this target from being
+ * built before all of its dependencies start getting processed.
+ */
+ pState->curstate = T_STATE_MAKE1B;
+}
+
+
+/*
+ * make1b() - when dependencies are up to date, build target with make1c().
+ *
+ * Called after all dependencies have started being processed and after each of
+ * them finishes its processing. The target actually goes on to get built in
+ * make1c() only after all of its dependencies have finished their processing.
+ */
+
+static void make1b( state * pState )
+{
+ TARGET * t = pState->t;
+ TARGETS * c;
+ TARGET * failed = 0;
+ char * failed_name = "dependencies";
+
+ /* If any dependencies are still outstanding, wait until they call make1b()
+ * to signal their completion.
+ */
+ if ( --pState->t->asynccnt )
+ {
+ pop_state( &state_stack );
+ return;
+ }
+
+ /* Try to acquire a semaphore. If it is locked, wait until the target that
+ * locked it is built and signals completion.
+ */
+#ifdef OPT_SEMAPHORE
+ if ( t->semaphore && t->semaphore->asynccnt )
+ {
+ /* Append 't' to the list of targets waiting on semaphore. */
+ t->semaphore->parents = targetentry( t->semaphore->parents, t );
+ t->asynccnt++;
+
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: %s is busy, delaying launch of %s\n",
+ t->semaphore->name, t->name );
+ pop_state( &state_stack );
+ return;
+ }
+#endif
+
+ /* Now ready to build target 't', if dependencies built OK. */
+
+ /* Collect status from dependencies. */
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->status > t->status && !( c->target->flags & T_FLAG_NOCARE ) )
+ {
+ failed = c->target;
+ pState->t->status = c->target->status;
+ }
+ /* If an internal header node failed to build, we want to output the target
+ * that it failed on.
+ */
+ if ( failed )
+ {
+ failed_name = failed->flags & T_FLAG_INTERNAL
+ ? failed->failed
+ : failed->name;
+ }
+ t->failed = failed_name;
+
+ /* If actions for building any of the dependencies have failed, bail.
+ * Otherwise, execute all actions to make the current target.
+ */
+ if ( ( pState->t->status == EXEC_CMD_FAIL ) && pState->t->actions )
+ {
+ ++counts->skipped;
+ if ( ( pState->t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD )
+ {
+ if ( !unlink( pState->t->boundname ) )
+ printf( "...removing outdated %s\n", pState->t->boundname );
+ }
+ else
+ printf( "...skipped %s for lack of %s...\n", pState->t->name, failed_name );
+ }
+
+ if ( pState->t->status == EXEC_CMD_OK )
+ switch ( pState->t->fate )
+ {
+ /* These are handled by the default case below now
+ case T_FATE_INIT:
+ case T_FATE_MAKING:
+ */
+
+ case T_FATE_STABLE:
+ case T_FATE_NEWER:
+ break;
+
+ case T_FATE_CANTFIND:
+ case T_FATE_CANTMAKE:
+ pState->t->status = EXEC_CMD_FAIL;
+ break;
+
+ case T_FATE_ISTMP:
+ if ( DEBUG_MAKE )
+ printf( "...using %s...\n", pState->t->name );
+ break;
+
+ case T_FATE_TOUCHED:
+ case T_FATE_MISSING:
+ case T_FATE_NEEDTMP:
+ case T_FATE_OUTDATED:
+ case T_FATE_UPDATE:
+ case T_FATE_REBUILD:
+ /* Prepare commands for executing actions scheduled for this target
+ * and then schedule transfer to make1c() state to proceed with
+ * executing the prepared commands. Commands have their embedded
+ * variables automatically expanded, including making use of any "on
+ * target" variables.
+ */
+ if ( pState->t->actions )
+ {
+ ++counts->total;
+ if ( DEBUG_MAKE && !( counts->total % 100 ) )
+ printf( "...on %dth target...\n", counts->total );
+
+ pState->t->cmds = (char *)make1cmds( pState->t );
+ /* Set the target's "progress" so that make1c() counts it among
+ * its successes/failures.
+ */
+ pState->t->progress = T_MAKE_RUNNING;
+ }
+ break;
+
+ /* All possible fates should have been accounted for by now. */
+ default:
+ printf( "ERROR: %s has bad fate %d", pState->t->name,
+ pState->t->fate );
+ abort();
+ }
+
+ /* Call make1c() to begin the execution of the chain of commands needed to
+ * build the target. If we are not going to build the target (due to
+ * dependency failures or because no commands need to be run) the chain will be
+ * empty and make1c() will directly signal the target's completion.
+ */
+
+#ifdef OPT_SEMAPHORE
+ /* If there is a semaphore, indicate that it is in use. */
+ if ( pState->t->semaphore )
+ {
+ ++pState->t->semaphore->asynccnt;
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: %s now used by %s\n", pState->t->semaphore->name,
+ pState->t->name );
+ }
+#endif
+
+ pState->curstate = T_STATE_MAKE1C;
+}
+
+
+/*
+ * make1c() - launch target's next command, call parents' make1b() if none.
+ *
+ * If there are (more) commands to run to build this target (and we have not hit
+ * an error running earlier commands) we launch the command using exec_cmd(). If
+ * there are no more commands to run, we collect the status from all the actions
+ * and report our completion to all the parents.
+ */
+
+static void make1c( state * pState )
+{
+ CMD * cmd = (CMD *)pState->t->cmds;
+
+ if ( cmd && ( pState->t->status == EXEC_CMD_OK ) )
+ {
+ char * rule_name = 0;
+ char * target = 0;
+
+ if ( DEBUG_MAKEQ ||
+ ( !( cmd->rule->actions->flags & RULE_QUIETLY ) && DEBUG_MAKE ) )
+ {
+ rule_name = cmd->rule->name;
+ target = lol_get( &cmd->args, 0 )->string;
+ if ( globs.noexec )
+ out_action( rule_name, target, cmd->buf, "", "", EXIT_OK );
+ }
+
+ if ( globs.noexec )
+ {
+ pState->curstate = T_STATE_MAKE1D;
+ pState->status = EXEC_CMD_OK;
+ }
+ else
+ {
+ /* Pop state first because exec_cmd() could push state. */
+ pop_state( &state_stack );
+ exec_cmd( cmd->buf, make_closure, pState->t, cmd->shell, rule_name,
+ target );
+ }
+ }
+ else
+ {
+ TARGETS * c;
+ ACTIONS * actions;
+
+ /* Collect status from actions, and distribute it as well. */
+ for ( actions = pState->t->actions; actions; actions = actions->next )
+ if ( actions->action->status > pState->t->status )
+ pState->t->status = actions->action->status;
+ for ( actions = pState->t->actions; actions; actions = actions->next )
+ if ( pState->t->status > actions->action->status )
+ actions->action->status = pState->t->status;
+
+ /* Tally success/failure for those we tried to update. */
+ if ( pState->t->progress == T_MAKE_RUNNING )
+ switch ( pState->t->status )
+ {
+ case EXEC_CMD_OK : ++counts->made ; break;
+ case EXEC_CMD_FAIL: ++counts->failed; break;
+ }
+
+ /* Tell parents their dependency has been built. */
+ {
+ stack temp_stack = { NULL };
+ TARGET * t = pState->t;
+ TARGET * additional_includes = NULL;
+
+ t->progress = T_MAKE_DONE;
+
+ /* Target has been updated so rescan it for dependencies. */
+ if ( ( t->fate >= T_FATE_MISSING ) &&
+ ( t->status == EXEC_CMD_OK ) &&
+ !t->rescanned )
+ {
+ TARGET * target_to_rescan = t;
+ SETTINGS * s;
+
+ target_to_rescan->rescanned = 1;
+
+ if ( target_to_rescan->flags & T_FLAG_INTERNAL )
+ target_to_rescan = t->original_target;
+
+ /* Clean current includes. */
+ target_to_rescan->includes = 0;
+
+ s = copysettings( target_to_rescan->settings );
+ pushsettings( s );
+ headers( target_to_rescan );
+ popsettings( s );
+ freesettings( s );
+
+ if ( target_to_rescan->includes )
+ {
+ target_to_rescan->includes->rescanned = 1;
+ /* Tricky. The parents have already been processed, but they
+ * have not seen the internal node, because it was just
+ * created. We need to make the calls to make1a() that would
+ * have been made by the parents here, and also make sure
+ * all unprocessed parents will pick up the includes. We
+ * must make sure processing of the additional make1a()
+ * invocations is done before the make1b() call that marks this
+ * target as built; otherwise a parent could be considered
+ * built before this make1a() processing has even started.
+ */
+ make0( target_to_rescan->includes, target_to_rescan->parents->target, 0, 0, 0 );
+ for ( c = target_to_rescan->parents; c; c = c->next )
+ c->target->depends = targetentry( c->target->depends,
+ target_to_rescan->includes );
+ /* Will be processed below. */
+ additional_includes = target_to_rescan->includes;
+ }
+ }
+
+ if ( additional_includes )
+ for ( c = t->parents; c; c = c->next )
+ push_state( &temp_stack, additional_includes, c->target, T_STATE_MAKE1A );
+
+ for ( c = t->parents; c; c = c->next )
+ push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B );
+
+#ifdef OPT_SEMAPHORE
+ /* If there is a semaphore, it is now free. */
+ if ( t->semaphore )
+ {
+ assert( t->semaphore->asynccnt == 1 );
+ --t->semaphore->asynccnt;
+
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: %s is now free\n", t->semaphore->name );
+
+ /* If anything is waiting, notify only the next target. There is no
+ * point in notifying the rest of the waiting targets, since they will be
+ * notified again once the semaphore frees up.
+ */
+ if ( t->semaphore->parents )
+ {
+ TARGETS * first = t->semaphore->parents;
+ if ( first->next )
+ first->next->tail = first->tail;
+ t->semaphore->parents = first->next;
+
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: placing %s on stack\n", first->target->name );
+ push_state( &temp_stack, first->target, NULL, T_STATE_MAKE1B );
+ BJAM_FREE( first );
+ }
+ }
+#endif
+
+ /* Must pop state before pushing any more. */
+ pop_state( &state_stack );
+
+ /* Using stacks reverses the order of execution. Reverse it back. */
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+ }
+}
+
+
+/*
+ * call_timing_rule() - Look up the __TIMING_RULE__ variable on the given
+ * target, and if non-empty, invoke the rule it names, passing the given
+ * timing_info.
+ */
+
+static void call_timing_rule( TARGET * target, timing_info * time )
+{
+ LIST * timing_rule;
+
+ pushsettings( target->settings );
+ timing_rule = var_get( "__TIMING_RULE__" );
+ popsettings( target->settings );
+
+ if ( timing_rule )
+ {
+ /* rule timing-rule ( args * : target : start end user system ) */
+
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ frame_init( frame );
+
+ /* args * :: $(__TIMING_RULE__[2-]) */
+ lol_add( frame->args, list_copy( L0, timing_rule->next ) );
+
+ /* target :: the name of the target */
+ lol_add( frame->args, list_new( L0, target->name ) );
+
+ /* start end user system :: info about the action command */
+ lol_add( frame->args, list_new( list_new( list_new( list_new( L0,
+ outf_time ( time->start ) ),
+ outf_time ( time->end ) ),
+ outf_double( time->user ) ),
+ outf_double( time->system ) ) );
+
+ /* Call the rule. */
+ evaluate_rule( timing_rule->string, frame );
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * call_action_rule() - Look up the __ACTION_RULE__ variable on the given
+ * target, and if non-empty, invoke the rule it names, passing the given info,
+ * timing_info, executed command and command output.
+ */
+
+static void call_action_rule
+(
+ TARGET * target,
+ int status,
+ timing_info * time,
+ char * executed_command,
+ char * command_output
+)
+{
+ LIST * action_rule;
+
+ pushsettings( target->settings );
+ action_rule = var_get( "__ACTION_RULE__" );
+ popsettings( target->settings );
+
+ if ( action_rule )
+ {
+ /* rule action-rule (
+ args * :
+ target :
+ command status start end user system :
+ output ? ) */
+
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ frame_init( frame );
+
+ /* args * :: $(__ACTION_RULE__[2-]) */
+ lol_add( frame->args, list_copy( L0, action_rule->next ) );
+
+ /* target :: the name of the target */
+ lol_add( frame->args, list_new( L0, target->name ) );
+
+ /* command status start end user system :: info about the action command */
+ lol_add( frame->args,
+ list_new( list_new( list_new( list_new( list_new( list_new( L0,
+ newstr( executed_command ) ),
+ outf_int( status ) ),
+ outf_time( time->start ) ),
+ outf_time( time->end ) ),
+ outf_double( time->user ) ),
+ outf_double( time->system ) ) );
+
+ /* output ? :: the output of the action command */
+ if ( command_output )
+ lol_add( frame->args, list_new( L0, newstr( command_output ) ) );
+ else
+ lol_add( frame->args, L0 );
+
+ /* Call the rule. */
+ evaluate_rule( action_rule->string, frame );
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * make_closure() - internal function passed as a notification callback for when
+ * commands finish getting executed by the OS.
+ */
+
+static void make_closure
+(
+ void * closure,
+ int status,
+ timing_info * time,
+ char * executed_command,
+ char * command_output
+)
+{
+ TARGET * built = (TARGET *)closure;
+
+ call_timing_rule( built, time );
+ if ( DEBUG_EXECCMD )
+ printf( "%f sec system; %f sec user\n", time->system, time->user );
+
+ call_action_rule( built, status, time, executed_command, command_output );
+
+ push_state( &state_stack, built, NULL, T_STATE_MAKE1D )->status = status;
+}
+
+
+/*
+ * make1d() - handle command execution completion and call back make1c().
+ *
+ * exec_cmd() has completed and now all we need to do is fiddle with the status
+ * and call back to make1c() so it can run the next command scheduled for
+ * building this target or close up the target's build process in case there are
+ * no more commands scheduled for it. On interrupts, we bail heavily.
+ */
+
+static void make1d( state * pState )
+{
+ TARGET * t = pState->t;
+ CMD * cmd = (CMD *)t->cmds;
+ int status = pState->status;
+
+ if ( t->flags & T_FLAG_FAIL_EXPECTED )
+ {
+ /* Invert execution result when FAIL_EXPECTED has been applied. */
+ switch ( status )
+ {
+ case EXEC_CMD_FAIL: status = EXEC_CMD_OK ; break;
+ case EXEC_CMD_OK: status = EXEC_CMD_FAIL; break;
+ }
+ }
+
+ if ( ( status == EXEC_CMD_FAIL ) &&
+ ( cmd->rule->actions->flags & RULE_IGNORE ) )
+ status = EXEC_CMD_OK;
+
+ /* On interrupt, set intr so _everything_ fails. */
+ if ( status == EXEC_CMD_INTR )
+ ++intr;
+
+ /* Print command text on failure. */
+ if ( ( status == EXEC_CMD_FAIL ) && DEBUG_MAKE )
+ {
+ if ( !DEBUG_EXEC )
+ printf( "%s\n", cmd->buf );
+
+ printf( "...failed %s ", cmd->rule->name );
+ list_print( lol_get( &cmd->args, 0 ) );
+ printf( "...\n" );
+ }
+
+ /* Treat failed commands as interrupts if we were asked to stop the build
+ * as soon as any error occurs.
+ */
+ if ( ( status == EXEC_CMD_FAIL ) && globs.quitquick )
+ ++intr;
+
+ /* If the command was interrupted or failed and the target is not
+ * "precious", remove the targets.
+ */
+ if (status != EXEC_CMD_OK)
+ {
+ LIST * targets = lol_get( &cmd->args, 0 );
+ for ( ; targets; targets = list_next( targets ) )
+ {
+ int need_unlink = 1;
+ TARGET* t = bindtarget ( targets->string );
+ if (t->flags & T_FLAG_PRECIOUS)
+ {
+ need_unlink = 0;
+ }
+ if (need_unlink && !unlink( targets->string ) )
+ printf( "...removing %s\n", targets->string );
+ }
+ }
+
+ /* Free this command and call make1c() to move onto the next one scheduled
+ * for building this same target.
+ */
+ t->status = status;
+ t->cmds = (char *)cmd_next( cmd );
+ cmd_free( cmd );
+ pState->curstate = T_STATE_MAKE1C;
+}
+
+
+/*
+ * swap_settings() - replace the settings from the current module and target
+ * with those from the new module and target
+ */
+
+static void swap_settings
+(
+ module_t * * current_module,
+ TARGET * * current_target,
+ module_t * new_module,
+ TARGET * new_target
+)
+{
+ if ( new_module == root_module() )
+ new_module = 0;
+
+ if ( ( new_target == *current_target ) && ( new_module == *current_module ) )
+ return;
+
+ if ( *current_target )
+ popsettings( (*current_target)->settings );
+
+ if ( new_module != *current_module )
+ {
+ if ( *current_module )
+ exit_module( *current_module );
+
+ *current_module = new_module;
+
+ if ( new_module )
+ enter_module( new_module );
+ }
+
+ *current_target = new_target;
+ if ( new_target )
+ pushsettings( new_target->settings );
+}
+
+
+/*
+ * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
+ *
+ * Essentially copies a chain of ACTIONs to a chain of CMDs, grouping
+ * RULE_TOGETHER actions, splitting RULE_PIECEMEAL actions, and handling
+ * RULE_NEWSRCS actions. The result is a chain of CMDs which can be expanded by
+ * var_string() and executed using exec_cmd().
+ */
+
+static CMD * make1cmds( TARGET * t )
+{
+ CMD * cmds = 0;
+ LIST * shell = 0;
+ module_t * settings_module = 0;
+ TARGET * settings_target = 0;
+ ACTIONS * a0;
+
+ /* Step through actions. Actions may be shared with other targets or grouped
+ * using RULE_TOGETHER, so actions already seen are skipped.
+ */
+ for ( a0 = t->actions ; a0; a0 = a0->next )
+ {
+ RULE * rule = a0->action->rule;
+ rule_actions * actions = rule->actions;
+ SETTINGS * boundvars;
+ LIST * nt;
+ LIST * ns;
+ ACTIONS * a1;
+ int start;
+ int chunk;
+ int length;
+
+ /* Only do rules with commands to execute. If this action has already
+ * been executed, use saved status.
+ */
+ if ( !actions || a0->action->running )
+ continue;
+
+ a0->action->running = 1;
+
+ /* Make LISTS of targets and sources. If `execute together` has been
+ * specified for this rule, tack on sources from each instance of this
+ * rule for this target.
+ */
+ nt = make1list( L0, a0->action->targets, 0 );
+ ns = make1list( L0, a0->action->sources, actions->flags );
+ if ( actions->flags & RULE_TOGETHER )
+ for ( a1 = a0->next; a1; a1 = a1->next )
+ if ( a1->action->rule == rule && !a1->action->running )
+ {
+ ns = make1list( ns, a1->action->sources, actions->flags );
+ a1->action->running = 1;
+ }
+
+ /* If doing only updated (or existing) sources, but none have been
+ * updated (or exist), skip this action.
+ */
+ if ( !ns && ( actions->flags & ( RULE_NEWSRCS | RULE_EXISTING ) ) )
+ {
+ list_free( nt );
+ continue;
+ }
+
+ swap_settings( &settings_module, &settings_target, rule->module, t );
+ if ( !shell )
+ shell = var_get( "JAMSHELL" ); /* shell is per-target */
+
+ /* If we had 'actions xxx bind vars' we bind the vars now. */
+ boundvars = make1settings( actions->bindlist );
+ pushsettings( boundvars );
+
+ /*
+ * Build command, starting with all source args.
+ *
+ * If cmd_new returns 0, it is because the resulting command length is
+ * > MAXLINE. In this case, we will slowly reduce the number of source
+ * arguments presented until it does fit. This only applies to actions
+ * that allow PIECEMEAL commands.
+ *
+ * While reducing slowly takes a bit of compute time to get things just
+ * right, it is worth it to get as close to MAXLINE as possible, because
+ * launching the commands we are executing is likely to be much more
+ * compute intensive.
+ *
+ * Note we loop through at least once, for sourceless actions.
+ */
+
+ start = 0;
+ chunk = length = list_length( ns );
+
+ do
+ {
+ /* Build cmd: cmd_new consumes its lists. */
+ CMD * cmd = cmd_new( rule,
+ list_copy( L0, nt ),
+ list_sublist( ns, start, chunk ),
+ list_copy( L0, shell ) );
+
+ if ( cmd )
+ {
+ /* It fit: chain it up. */
+ if ( !cmds ) cmds = cmd;
+ else cmds->tail->next = cmd;
+ cmds->tail = cmd;
+ start += chunk;
+ }
+ else if ( ( actions->flags & RULE_PIECEMEAL ) && ( chunk > 1 ) )
+ {
+ /* Reduce chunk size slowly. */
+ chunk = chunk * 9 / 10;
+ }
+ else
+ {
+ /* Too long and not splittable. */
+ printf( "%s actions too long (max %d):\n", rule->name, MAXLINE );
+
+ /* Tell the user what didn't fit. */
+ cmd = cmd_new( rule, list_copy( L0, nt ),
+ list_sublist( ns, start, chunk ),
+ list_new( L0, newstr( "%" ) ) );
+ fputs( cmd->buf, stdout );
+ exit( EXITBAD );
+ }
+ }
+ while ( start < length );
+
+ /* These were always copied when used. */
+ list_free( nt );
+ list_free( ns );
+
+ /* Free the variables whose values were bound by 'actions xxx bind
+ * vars'.
+ */
+ popsettings( boundvars );
+ freesettings( boundvars );
+ }
+
+ swap_settings( &settings_module, &settings_target, 0, 0 );
+ return cmds;
+}
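
The chunk-reduction loop in make1cmds() is easier to follow in isolation. A small standalone sketch (illustrative only; fits() stands in for cmd_new() returning non-zero, and the numbers 100 and 25 are arbitrary) of how a PIECEMEAL action's sources get consumed in shrinking chunks:

    #include <stdio.h>

    /* Stand-in for "a command built from this many sources stays under MAXLINE".
     * Here we pretend at most 25 sources fit on one command line. */
    static int fits( int nsources ) { return nsources <= 25; }

    int main( void )
    {
        int length = 100;          /* number of bound sources */
        int start  = 0;
        int chunk  = length;       /* first try all of them at once */

        do
        {
            int take = chunk < length - start ? chunk : length - start;

            if ( fits( take ) )
            {
                printf( "command for sources [%d, %d)\n", start, start + take );
                start += chunk;    /* as in make1cmds(): advance by chunk */
            }
            else
                chunk = chunk * 9 / 10;    /* the same slow 9/10 reduction */
        }
        while ( start < length );

        return 0;
    }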
+
+
+/*
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>).
+ */
+
+static LIST * make1list( LIST * l, TARGETS * targets, int flags )
+{
+ for ( ; targets; targets = targets->next )
+ {
+ TARGET * t = targets->target;
+
+ if ( t->binding == T_BIND_UNBOUND )
+ make1bind( t );
+
+ if ( ( flags & RULE_EXISTING ) && ( flags & RULE_NEWSRCS ) )
+ {
+ if ( ( t->binding != T_BIND_EXISTS ) && ( t->fate <= T_FATE_STABLE ) )
+ continue;
+ }
+ else
+ {
+ if ( ( flags & RULE_EXISTING ) && ( t->binding != T_BIND_EXISTS ) )
+ continue;
+
+ if ( ( flags & RULE_NEWSRCS ) && ( t->fate <= T_FATE_STABLE ) )
+ continue;
+ }
+
+ /* Prohibit duplicates for RULE_TOGETHER. */
+ if ( flags & RULE_TOGETHER )
+ {
+ LIST * m;
+ for ( m = l; m; m = m->next )
+ if ( !strcmp( m->string, t->boundname ) )
+ break;
+ if ( m )
+ continue;
+ }
+
+ /* Build new list. */
+ l = list_new( l, copystr( t->boundname ) );
+ }
+
+ return l;
+}
+
+
+/*
+ * make1settings() - for vars that get bound values, build up replacement lists.
+ */
+
+static SETTINGS * make1settings( LIST * vars )
+{
+ SETTINGS * settings = 0;
+
+ for ( ; vars; vars = list_next( vars ) )
+ {
+ LIST * l = var_get( vars->string );
+ LIST * nl = 0;
+
+ for ( ; l; l = list_next( l ) )
+ {
+ TARGET * t = bindtarget( l->string );
+
+ /* Make sure the target is bound. */
+ if ( t->binding == T_BIND_UNBOUND )
+ make1bind( t );
+
+ /* Build a new list. */
+ nl = list_new( nl, copystr( t->boundname ) );
+ }
+
+ /* Add to settings chain. */
+ settings = addsettings( settings, VAR_SET, vars->string, nl );
+ }
+
+ return settings;
+}
+
+
+/*
+ * make1bind() - bind targets that were not bound during dependency analysis
+ *
+ * Spot the kludge! If a target is not in the dependency tree, it did not get
+ * bound by make0(), so we have to do it here. Ugly.
+ */
+
+static void make1bind( TARGET * t )
+{
+ if ( t->flags & T_FLAG_NOTFILE )
+ return;
+
+ pushsettings( t->settings );
+ t->boundname = search( t->name, &t->time, 0, ( t->flags & T_FLAG_ISFILE ) );
+ t->binding = t->time ? T_BIND_EXISTS : T_BIND_MISSING;
+ popsettings( t->settings );
+}
diff --git a/jam-files/engine/md5.c b/jam-files/engine/md5.c
new file mode 100644
index 000000000..c35d96c5e
--- /dev/null
+++ b/jam-files/engine/md5.c
@@ -0,0 +1,381 @@
+/*
+ Copyright (C) 1999, 2000, 2002 Aladdin Enterprises. All rights reserved.
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ L. Peter Deutsch
+ ghost@aladdin.com
+
+ */
+/* $Id: md5.c,v 1.6 2002/04/13 19:20:28 lpd Exp $ */
+/*
+ Independent implementation of MD5 (RFC 1321).
+
+ This code implements the MD5 Algorithm defined in RFC 1321, whose
+ text is available at
+ http://www.ietf.org/rfc/rfc1321.txt
+ The code is derived from the text of the RFC, including the test suite
+ (section A.5) but excluding the rest of Appendix A. It does not include
+ any code or documentation that is identified in the RFC as being
+ copyrighted.
+
+ The original and principal author of md5.c is L. Peter Deutsch
+ <ghost@aladdin.com>. Other authors are noted in the change history
+ that follows (in reverse chronological order):
+
+ 2002-04-13 lpd Clarified derivation from RFC 1321; now handles byte order
+ either statically or dynamically; added missing #include <string.h>
+ in library.
+ 2002-03-11 lpd Corrected argument list for main(), and added int return
+ type, in test program and T value program.
+ 2002-02-21 lpd Added missing #include <stdio.h> in test program.
+ 2000-07-03 lpd Patched to eliminate warnings about "constant is
+ unsigned in ANSI C, signed in traditional"; made test program
+ self-checking.
+ 1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+ 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5).
+ 1999-05-03 lpd Original version.
+ */
+
+#include "md5.h"
+#include <string.h>
+
+#undef BYTE_ORDER /* 1 = big-endian, -1 = little-endian, 0 = unknown */
+#ifdef ARCH_IS_BIG_ENDIAN
+# define BYTE_ORDER (ARCH_IS_BIG_ENDIAN ? 1 : -1)
+#else
+# define BYTE_ORDER 0
+#endif
+
+#define T_MASK ((md5_word_t)~0)
+#define T1 /* 0xd76aa478 */ (T_MASK ^ 0x28955b87)
+#define T2 /* 0xe8c7b756 */ (T_MASK ^ 0x173848a9)
+#define T3 0x242070db
+#define T4 /* 0xc1bdceee */ (T_MASK ^ 0x3e423111)
+#define T5 /* 0xf57c0faf */ (T_MASK ^ 0x0a83f050)
+#define T6 0x4787c62a
+#define T7 /* 0xa8304613 */ (T_MASK ^ 0x57cfb9ec)
+#define T8 /* 0xfd469501 */ (T_MASK ^ 0x02b96afe)
+#define T9 0x698098d8
+#define T10 /* 0x8b44f7af */ (T_MASK ^ 0x74bb0850)
+#define T11 /* 0xffff5bb1 */ (T_MASK ^ 0x0000a44e)
+#define T12 /* 0x895cd7be */ (T_MASK ^ 0x76a32841)
+#define T13 0x6b901122
+#define T14 /* 0xfd987193 */ (T_MASK ^ 0x02678e6c)
+#define T15 /* 0xa679438e */ (T_MASK ^ 0x5986bc71)
+#define T16 0x49b40821
+#define T17 /* 0xf61e2562 */ (T_MASK ^ 0x09e1da9d)
+#define T18 /* 0xc040b340 */ (T_MASK ^ 0x3fbf4cbf)
+#define T19 0x265e5a51
+#define T20 /* 0xe9b6c7aa */ (T_MASK ^ 0x16493855)
+#define T21 /* 0xd62f105d */ (T_MASK ^ 0x29d0efa2)
+#define T22 0x02441453
+#define T23 /* 0xd8a1e681 */ (T_MASK ^ 0x275e197e)
+#define T24 /* 0xe7d3fbc8 */ (T_MASK ^ 0x182c0437)
+#define T25 0x21e1cde6
+#define T26 /* 0xc33707d6 */ (T_MASK ^ 0x3cc8f829)
+#define T27 /* 0xf4d50d87 */ (T_MASK ^ 0x0b2af278)
+#define T28 0x455a14ed
+#define T29 /* 0xa9e3e905 */ (T_MASK ^ 0x561c16fa)
+#define T30 /* 0xfcefa3f8 */ (T_MASK ^ 0x03105c07)
+#define T31 0x676f02d9
+#define T32 /* 0x8d2a4c8a */ (T_MASK ^ 0x72d5b375)
+#define T33 /* 0xfffa3942 */ (T_MASK ^ 0x0005c6bd)
+#define T34 /* 0x8771f681 */ (T_MASK ^ 0x788e097e)
+#define T35 0x6d9d6122
+#define T36 /* 0xfde5380c */ (T_MASK ^ 0x021ac7f3)
+#define T37 /* 0xa4beea44 */ (T_MASK ^ 0x5b4115bb)
+#define T38 0x4bdecfa9
+#define T39 /* 0xf6bb4b60 */ (T_MASK ^ 0x0944b49f)
+#define T40 /* 0xbebfbc70 */ (T_MASK ^ 0x4140438f)
+#define T41 0x289b7ec6
+#define T42 /* 0xeaa127fa */ (T_MASK ^ 0x155ed805)
+#define T43 /* 0xd4ef3085 */ (T_MASK ^ 0x2b10cf7a)
+#define T44 0x04881d05
+#define T45 /* 0xd9d4d039 */ (T_MASK ^ 0x262b2fc6)
+#define T46 /* 0xe6db99e5 */ (T_MASK ^ 0x1924661a)
+#define T47 0x1fa27cf8
+#define T48 /* 0xc4ac5665 */ (T_MASK ^ 0x3b53a99a)
+#define T49 /* 0xf4292244 */ (T_MASK ^ 0x0bd6ddbb)
+#define T50 0x432aff97
+#define T51 /* 0xab9423a7 */ (T_MASK ^ 0x546bdc58)
+#define T52 /* 0xfc93a039 */ (T_MASK ^ 0x036c5fc6)
+#define T53 0x655b59c3
+#define T54 /* 0x8f0ccc92 */ (T_MASK ^ 0x70f3336d)
+#define T55 /* 0xffeff47d */ (T_MASK ^ 0x00100b82)
+#define T56 /* 0x85845dd1 */ (T_MASK ^ 0x7a7ba22e)
+#define T57 0x6fa87e4f
+#define T58 /* 0xfe2ce6e0 */ (T_MASK ^ 0x01d3191f)
+#define T59 /* 0xa3014314 */ (T_MASK ^ 0x5cfebceb)
+#define T60 0x4e0811a1
+#define T61 /* 0xf7537e82 */ (T_MASK ^ 0x08ac817d)
+#define T62 /* 0xbd3af235 */ (T_MASK ^ 0x42c50dca)
+#define T63 0x2ad7d2bb
+#define T64 /* 0xeb86d391 */ (T_MASK ^ 0x14792c6e)
+
+
+static void
+md5_process(md5_state_t *pms, const md5_byte_t *data /*[64]*/)
+{
+ md5_word_t
+ a = pms->abcd[0], b = pms->abcd[1],
+ c = pms->abcd[2], d = pms->abcd[3];
+ md5_word_t t;
+#if BYTE_ORDER > 0
+ /* Define storage only for big-endian CPUs. */
+ md5_word_t X[16];
+#else
+ /* Define storage for little-endian or both types of CPUs. */
+ md5_word_t xbuf[16];
+ const md5_word_t *X;
+#endif
+
+ {
+#if BYTE_ORDER == 0
+ /*
+ * Determine dynamically whether this is a big-endian or
+ * little-endian machine, since we can use a more efficient
+ * algorithm on the latter.
+ */
+ static const int w = 1;
+
+ if (*((const md5_byte_t *)&w)) /* dynamic little-endian */
+#endif
+#if BYTE_ORDER <= 0 /* little-endian */
+ {
+ /*
+ * On little-endian machines, we can process properly aligned
+ * data without copying it.
+ */
+ if (!((data - (const md5_byte_t *)0) & 3)) {
+ /* data are properly aligned */
+ X = (const md5_word_t *)data;
+ } else {
+ /* not aligned */
+ memcpy(xbuf, data, 64);
+ X = xbuf;
+ }
+ }
+#endif
+#if BYTE_ORDER == 0
+ else /* dynamic big-endian */
+#endif
+#if BYTE_ORDER >= 0 /* big-endian */
+ {
+ /*
+ * On big-endian machines, we must arrange the bytes in the
+ * right order.
+ */
+ const md5_byte_t *xp = data;
+ int i;
+
+# if BYTE_ORDER == 0
+ X = xbuf; /* (dynamic only) */
+# else
+# define xbuf X /* (static only) */
+# endif
+ for (i = 0; i < 16; ++i, xp += 4)
+ xbuf[i] = xp[0] + (xp[1] << 8) + (xp[2] << 16) + (xp[3] << 24);
+ }
+#endif
+ }
+
+#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32 - (n))))
+
+ /* Round 1. */
+ /* Let [abcd k s i] denote the operation
+ a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s). */
+#define F(x, y, z) (((x) & (y)) | (~(x) & (z)))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + F(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 0, 7, T1);
+ SET(d, a, b, c, 1, 12, T2);
+ SET(c, d, a, b, 2, 17, T3);
+ SET(b, c, d, a, 3, 22, T4);
+ SET(a, b, c, d, 4, 7, T5);
+ SET(d, a, b, c, 5, 12, T6);
+ SET(c, d, a, b, 6, 17, T7);
+ SET(b, c, d, a, 7, 22, T8);
+ SET(a, b, c, d, 8, 7, T9);
+ SET(d, a, b, c, 9, 12, T10);
+ SET(c, d, a, b, 10, 17, T11);
+ SET(b, c, d, a, 11, 22, T12);
+ SET(a, b, c, d, 12, 7, T13);
+ SET(d, a, b, c, 13, 12, T14);
+ SET(c, d, a, b, 14, 17, T15);
+ SET(b, c, d, a, 15, 22, T16);
+#undef SET
+
+ /* Round 2. */
+ /* Let [abcd k s i] denote the operation
+ a = b + ((a + G(b,c,d) + X[k] + T[i]) <<< s). */
+#define G(x, y, z) (((x) & (z)) | ((y) & ~(z)))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + G(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 1, 5, T17);
+ SET(d, a, b, c, 6, 9, T18);
+ SET(c, d, a, b, 11, 14, T19);
+ SET(b, c, d, a, 0, 20, T20);
+ SET(a, b, c, d, 5, 5, T21);
+ SET(d, a, b, c, 10, 9, T22);
+ SET(c, d, a, b, 15, 14, T23);
+ SET(b, c, d, a, 4, 20, T24);
+ SET(a, b, c, d, 9, 5, T25);
+ SET(d, a, b, c, 14, 9, T26);
+ SET(c, d, a, b, 3, 14, T27);
+ SET(b, c, d, a, 8, 20, T28);
+ SET(a, b, c, d, 13, 5, T29);
+ SET(d, a, b, c, 2, 9, T30);
+ SET(c, d, a, b, 7, 14, T31);
+ SET(b, c, d, a, 12, 20, T32);
+#undef SET
+
+ /* Round 3. */
+ /* Let [abcd k s t] denote the operation
+ a = b + ((a + H(b,c,d) + X[k] + T[i]) <<< s). */
+#define H(x, y, z) ((x) ^ (y) ^ (z))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + H(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 5, 4, T33);
+ SET(d, a, b, c, 8, 11, T34);
+ SET(c, d, a, b, 11, 16, T35);
+ SET(b, c, d, a, 14, 23, T36);
+ SET(a, b, c, d, 1, 4, T37);
+ SET(d, a, b, c, 4, 11, T38);
+ SET(c, d, a, b, 7, 16, T39);
+ SET(b, c, d, a, 10, 23, T40);
+ SET(a, b, c, d, 13, 4, T41);
+ SET(d, a, b, c, 0, 11, T42);
+ SET(c, d, a, b, 3, 16, T43);
+ SET(b, c, d, a, 6, 23, T44);
+ SET(a, b, c, d, 9, 4, T45);
+ SET(d, a, b, c, 12, 11, T46);
+ SET(c, d, a, b, 15, 16, T47);
+ SET(b, c, d, a, 2, 23, T48);
+#undef SET
+
+ /* Round 4. */
+ /* Let [abcd k s t] denote the operation
+ a = b + ((a + I(b,c,d) + X[k] + T[i]) <<< s). */
+#define I(x, y, z) ((y) ^ ((x) | ~(z)))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + I(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 0, 6, T49);
+ SET(d, a, b, c, 7, 10, T50);
+ SET(c, d, a, b, 14, 15, T51);
+ SET(b, c, d, a, 5, 21, T52);
+ SET(a, b, c, d, 12, 6, T53);
+ SET(d, a, b, c, 3, 10, T54);
+ SET(c, d, a, b, 10, 15, T55);
+ SET(b, c, d, a, 1, 21, T56);
+ SET(a, b, c, d, 8, 6, T57);
+ SET(d, a, b, c, 15, 10, T58);
+ SET(c, d, a, b, 6, 15, T59);
+ SET(b, c, d, a, 13, 21, T60);
+ SET(a, b, c, d, 4, 6, T61);
+ SET(d, a, b, c, 11, 10, T62);
+ SET(c, d, a, b, 2, 15, T63);
+ SET(b, c, d, a, 9, 21, T64);
+#undef SET
+
+ /* Then perform the following additions. (That is increment each
+ of the four registers by the value it had before this block
+ was started.) */
+ pms->abcd[0] += a;
+ pms->abcd[1] += b;
+ pms->abcd[2] += c;
+ pms->abcd[3] += d;
+}
+
+void
+md5_init(md5_state_t *pms)
+{
+ pms->count[0] = pms->count[1] = 0;
+ pms->abcd[0] = 0x67452301;
+ pms->abcd[1] = /*0xefcdab89*/ T_MASK ^ 0x10325476;
+ pms->abcd[2] = /*0x98badcfe*/ T_MASK ^ 0x67452301;
+ pms->abcd[3] = 0x10325476;
+}
+
+void
+md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes)
+{
+ const md5_byte_t *p = data;
+ int left = nbytes;
+ int offset = (pms->count[0] >> 3) & 63;
+ md5_word_t nbits = (md5_word_t)(nbytes << 3);
+
+ if (nbytes <= 0)
+ return;
+
+ /* Update the message length. */
+ pms->count[1] += nbytes >> 29;
+ pms->count[0] += nbits;
+ if (pms->count[0] < nbits)
+ pms->count[1]++;
+
+ /* Process an initial partial block. */
+ if (offset) {
+ int copy = (offset + nbytes > 64 ? 64 - offset : nbytes);
+
+ memcpy(pms->buf + offset, p, copy);
+ if (offset + copy < 64)
+ return;
+ p += copy;
+ left -= copy;
+ md5_process(pms, pms->buf);
+ }
+
+ /* Process full blocks. */
+ for (; left >= 64; p += 64, left -= 64)
+ md5_process(pms, p);
+
+ /* Process a final partial block. */
+ if (left)
+ memcpy(pms->buf, p, left);
+}
+
+void
+md5_finish(md5_state_t *pms, md5_byte_t digest[16])
+{
+ static const md5_byte_t pad[64] = {
+ 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ md5_byte_t data[8];
+ int i;
+
+ /* Save the length before padding. */
+ for (i = 0; i < 8; ++i)
+ data[i] = (md5_byte_t)(pms->count[i >> 2] >> ((i & 3) << 3));
+ /* Pad to 56 bytes mod 64. */
+ md5_append(pms, pad, ((55 - (pms->count[0] >> 3)) & 63) + 1);
+ /* Append the length. */
+ md5_append(pms, data, 8);
+ for (i = 0; i < 16; ++i)
+ digest[i] = (md5_byte_t)(pms->abcd[i >> 2] >> ((i & 3) << 3));
+}
diff --git a/jam-files/engine/md5.h b/jam-files/engine/md5.h
new file mode 100644
index 000000000..698c995d8
--- /dev/null
+++ b/jam-files/engine/md5.h
@@ -0,0 +1,91 @@
+/*
+ Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved.
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ L. Peter Deutsch
+ ghost@aladdin.com
+
+ */
+/* $Id: md5.h,v 1.4 2002/04/13 19:20:28 lpd Exp $ */
+/*
+ Independent implementation of MD5 (RFC 1321).
+
+ This code implements the MD5 Algorithm defined in RFC 1321, whose
+ text is available at
+ http://www.ietf.org/rfc/rfc1321.txt
+ The code is derived from the text of the RFC, including the test suite
+ (section A.5) but excluding the rest of Appendix A. It does not include
+ any code or documentation that is identified in the RFC as being
+ copyrighted.
+
+ The original and principal author of md5.h is L. Peter Deutsch
+ <ghost@aladdin.com>. Other authors are noted in the change history
+ that follows (in reverse chronological order):
+
+ 2002-04-13 lpd Removed support for non-ANSI compilers; removed
+ references to Ghostscript; clarified derivation from RFC 1321;
+ now handles byte order either statically or dynamically.
+ 1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+ 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5);
+ added conditionalization for C++ compilation from Martin
+ Purschke <purschke@bnl.gov>.
+ 1999-05-03 lpd Original version.
+ */
+
+#ifndef md5_INCLUDED
+# define md5_INCLUDED
+
+/*
+ * This package supports both compile-time and run-time determination of CPU
+ * byte order. If ARCH_IS_BIG_ENDIAN is defined as 0, the code will be
+ * compiled to run only on little-endian CPUs; if ARCH_IS_BIG_ENDIAN is
+ * defined as non-zero, the code will be compiled to run only on big-endian
+ * CPUs; if ARCH_IS_BIG_ENDIAN is not defined, the code will be compiled to
+ * run on either big- or little-endian CPUs, but will run slightly less
+ * efficiently on either one than if ARCH_IS_BIG_ENDIAN is defined.
+ */
+
+typedef unsigned char md5_byte_t; /* 8-bit byte */
+typedef unsigned int md5_word_t; /* 32-bit word */
+
+/* Define the state of the MD5 Algorithm. */
+typedef struct md5_state_s {
+ md5_word_t count[2]; /* message length in bits, lsw first */
+ md5_word_t abcd[4]; /* digest buffer */
+ md5_byte_t buf[64]; /* accumulate block */
+} md5_state_t;
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/* Initialize the algorithm. */
+void md5_init(md5_state_t *pms);
+
+/* Append a string to the message. */
+void md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes);
+
+/* Finish the message and return the digest. */
+void md5_finish(md5_state_t *pms, md5_byte_t digest[16]);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif
+
+#endif /* md5_INCLUDED */
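
The three declarations above are the entire public interface. A minimal usage sketch (hypothetical example, not part of the engine) that hashes a string and prints the 32-character hex digest:

    #include <stdio.h>
    #include <string.h>
    #include "md5.h"

    int main( void )
    {
        const char * msg = "hello world";
        md5_state_t state;
        md5_byte_t digest[ 16 ];
        int i;

        md5_init( &state );
        md5_append( &state, (const md5_byte_t *)msg, (int)strlen( msg ) );
        md5_finish( &state, digest );

        for ( i = 0; i < 16; ++i )
            printf( "%02x", digest[ i ] );
        printf( "\n" );
        return 0;
    }

For "hello world" this should print 5eb63bbbe01eeed093cb22bb8f5acdc3.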
diff --git a/jam-files/engine/mem.c b/jam-files/engine/mem.c
new file mode 100644
index 000000000..6a11fb38a
--- /dev/null
+++ b/jam-files/engine/mem.c
@@ -0,0 +1,75 @@
+/*
+Copyright Rene Rivera 2006.
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include "jam.h"
+
+#ifdef OPT_BOEHM_GC
+
+ /* Compile the Boehm GC as one big chunk of code. It's much easier
+ this way than trying to make radical changes to the bjam build
+ scripts. */
+
+ #define ATOMIC_UNCOLLECTABLE
+ #define NO_EXECUTE_PERMISSION
+ #define ALL_INTERIOR_POINTERS
+
+ #define LARGE_CONFIG
+ /*
+ #define NO_SIGNALS
+ #define SILENT
+ */
+ #ifndef GC_DEBUG
+ #define NO_DEBUGGING
+ #endif
+
+ #ifdef __GLIBC__
+ #define __USE_GNU
+ #endif
+
+ #include "boehm_gc/reclaim.c"
+ #include "boehm_gc/allchblk.c"
+ #include "boehm_gc/misc.c"
+ #include "boehm_gc/alloc.c"
+ #include "boehm_gc/mach_dep.c"
+ #include "boehm_gc/os_dep.c"
+ #include "boehm_gc/mark_rts.c"
+ #include "boehm_gc/headers.c"
+ #include "boehm_gc/mark.c"
+ #include "boehm_gc/obj_map.c"
+ #include "boehm_gc/pcr_interface.c"
+ #include "boehm_gc/blacklst.c"
+ #include "boehm_gc/new_hblk.c"
+ #include "boehm_gc/real_malloc.c"
+ #include "boehm_gc/dyn_load.c"
+ #include "boehm_gc/dbg_mlc.c"
+ #include "boehm_gc/malloc.c"
+ #include "boehm_gc/stubborn.c"
+ #include "boehm_gc/checksums.c"
+ #include "boehm_gc/pthread_support.c"
+ #include "boehm_gc/pthread_stop_world.c"
+ #include "boehm_gc/darwin_stop_world.c"
+ #include "boehm_gc/typd_mlc.c"
+ #include "boehm_gc/ptr_chck.c"
+ #include "boehm_gc/mallocx.c"
+ #include "boehm_gc/gcj_mlc.c"
+ #include "boehm_gc/specific.c"
+ #include "boehm_gc/gc_dlopen.c"
+ #include "boehm_gc/backgraph.c"
+ #include "boehm_gc/win32_threads.c"
+
+ /* Needs to be last. */
+ #include "boehm_gc/finalize.c"
+
+#elif defined(OPT_DUMA)
+
+ #ifdef OS_NT
+ #define WIN32
+ #endif
+ #include "duma/duma.c"
+ #include "duma/print.c"
+
+#endif
diff --git a/jam-files/engine/mem.h b/jam-files/engine/mem.h
new file mode 100644
index 000000000..71b2fb4be
--- /dev/null
+++ b/jam-files/engine/mem.h
@@ -0,0 +1,134 @@
+/*
+Copyright Rene Rivera 2006.
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifndef BJAM_MEM_H
+#define BJAM_MEM_H
+
+
+#ifdef OPT_BOEHM_GC
+
+ /* Use Boehm GC memory allocator. */
+ #include <gc.h>
+ #define bjam_malloc_x(s) memset(GC_malloc(s),0,s)
+ #define bjam_malloc_atomic_x(s) memset(GC_malloc_atomic(s),0,s)
+ #define bjam_calloc_x(n,s) memset(GC_malloc((n)*(s)),0,(n)*(s))
+ #define bjam_calloc_atomic_x(n,s) memset(GC_malloc_atomic((n)*(s)),0,(n)*(s))
+ #define bjam_realloc_x(p,s) GC_realloc(p,s)
+ #define bjam_free_x(p) GC_free(p)
+ #define bjam_mem_init_x() GC_init(); GC_enable_incremental()
+
+ #define bjam_malloc_raw_x(s) malloc(s)
+ #define bjam_calloc_raw_x(n,s) calloc(n,s)
+ #define bjam_realloc_raw_x(p,s) realloc(p,s)
+ #define bjam_free_raw_x(p) free(p)
+
+ #ifndef BJAM_NEWSTR_NO_ALLOCATE
+ #define BJAM_NEWSTR_NO_ALLOCATE
+ #endif
+
+#elif defined(OPT_DUMA)
+
+ /* Use Duma memory debugging library. */
+ #include <stdlib.h>
+ #define _DUMA_CONFIG_H_
+ #define DUMA_NO_GLOBAL_MALLOC_FREE
+ #define DUMA_EXPLICIT_INIT
+ #define DUMA_NO_THREAD_SAFETY
+ #define DUMA_NO_CPP_SUPPORT
+ /* #define DUMA_NO_LEAKDETECTION */
+ /* #define DUMA_USE_FRAMENO */
+ /* #define DUMA_PREFER_ATEXIT */
+ /* #define DUMA_OLD_DEL_MACRO */
+ /* #define DUMA_NO_HANG_MSG */
+ #define DUMA_PAGE_SIZE 4096
+ #define DUMA_MIN_ALIGNMENT 1
+ /* #define DUMA_GNU_INIT_ATTR 0 */
+ typedef unsigned int DUMA_ADDR;
+ typedef unsigned int DUMA_SIZE;
+ #include <duma.h>
+ #define bjam_malloc_x(s) malloc(s)
+ #define bjam_calloc_x(n,s) calloc(n,s)
+ #define bjam_realloc_x(p,s) realloc(p,s)
+ #define bjam_free_x(p) free(p)
+
+ #ifndef BJAM_NEWSTR_NO_ALLOCATE
+ #define BJAM_NEWSTR_NO_ALLOCATE
+ #endif
+
+#else
+
+ /* Standard C memory allocation. */
+ #define bjam_malloc_x(s) malloc(s)
+ #define bjam_calloc_x(n,s) calloc(n,s)
+ #define bjam_realloc_x(p,s) realloc(p,s)
+ #define bjam_free_x(p) free(p)
+
+#endif
+
+#ifndef bjam_malloc_atomic_x
+ #define bjam_malloc_atomic_x(s) bjam_malloc_x(s)
+#endif
+#ifndef bjam_calloc_atomic_x
+ #define bjam_calloc_atomic_x(n,s) bjam_calloc_x(n,s)
+#endif
+#ifndef bjam_mem_init_x
+ #define bjam_mem_init_x()
+#endif
+#ifndef bjam_mem_close_x
+ #define bjam_mem_close_x()
+#endif
+#ifndef bjam_malloc_raw_x
+ #define bjam_malloc_raw_x(s) bjam_malloc_x(s)
+#endif
+#ifndef bjam_calloc_raw_x
+ #define bjam_calloc_raw_x(n,s) bjam_calloc_x(n,s)
+#endif
+#ifndef bjam_realloc_raw_x
+ #define bjam_realloc_raw_x(p,s) bjam_realloc_x(p,s)
+#endif
+#ifndef bjam_free_raw_x
+ #define bjam_free_raw_x(p) bjam_free_x(p)
+#endif
+
+#ifdef OPT_DEBUG_PROFILE
+
+ /* Profile tracing of memory allocations. */
+ #define BJAM_MALLOC(s) (profile_memory(s), bjam_malloc_x(s))
+ #define BJAM_MALLOC_ATOMIC(s) (profile_memory(s), bjam_malloc_atomic_x(s))
+ #define BJAM_CALLOC(n,s) (profile_memory(n*s), bjam_calloc_x(n,s))
+ #define BJAM_CALLOC_ATOMIC(n,s) (profile_memory(n*s), bjam_calloc_atomic_x(n,s))
+ #define BJAM_REALLOC(p,s) (profile_memory(s), bjam_realloc_x(p,s))
+ #define BJAM_FREE(p) bjam_free_x(p)
+ #define BJAM_MEM_INIT() bjam_mem_init_x()
+ #define BJAM_MEM_CLOSE() bjam_mem_close_x()
+
+ #define BJAM_MALLOC_RAW(s) (profile_memory(s), bjam_malloc_raw_x(s))
+ #define BJAM_CALLOC_RAW(n,s) (profile_memory(n*s), bjam_calloc_raw_x(n,s))
+ #define BJAM_REALLOC_RAW(p,s) (profile_memory(s), bjam_realloc_raw_x(p,s))
+ #define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
+
+#else
+
+ /* No mem tracing. */
+ #define BJAM_MALLOC(s) bjam_malloc_x(s)
+ #define BJAM_MALLOC_ATOMIC(s) bjam_malloc_atomic_x(s)
+ #define BJAM_CALLOC(n,s) bjam_calloc_x(n,s)
+ #define BJAM_CALLOC_ATOMIC(n,s) bjam_calloc_atomic_x(n,s)
+ #define BJAM_REALLOC(p,s) bjam_realloc_x(p,s)
+ #define BJAM_FREE(p) bjam_free_x(p)
+ #define BJAM_MEM_INIT() bjam_mem_init_x()
+ #define BJAM_MEM_CLOSE() bjam_mem_close_x()
+
+ #define BJAM_MALLOC_RAW(s) bjam_malloc_raw_x(s)
+ #define BJAM_CALLOC_RAW(n,s) bjam_calloc_raw_x(n,s)
+ #define BJAM_REALLOC_RAW(p,s) bjam_realloc_raw_x(p,s)
+ #define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
+
+#endif
+
+
+#endif
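
Engine code is expected to go through the BJAM_* macros rather than calling malloc()/free() directly, so the allocator can be swapped at compile time. A minimal sketch, assuming the default configuration (none of OPT_BOEHM_GC, OPT_DUMA or OPT_DEBUG_PROFILE defined, so the macros map straight to the C library):

    #include <stdlib.h>
    #include <string.h>
    #include "mem.h"

    int main( void )
    {
        /* BJAM_MALLOC expands to malloc() here, or to the Boehm GC / DUMA
         * allocators in the other configurations above. */
        char * copy = (char *)BJAM_MALLOC( strlen( "b2" ) + 1 );
        strcpy( copy, "b2" );
        BJAM_FREE( copy );      /* must pair with BJAM_MALLOC, never a raw free() */
        return 0;
    }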
diff --git a/jam-files/engine/mkjambase.c b/jam-files/engine/mkjambase.c
new file mode 100644
index 000000000..cdf599820
--- /dev/null
+++ b/jam-files/engine/mkjambase.c
@@ -0,0 +1,123 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * mkjambase.c - turn Jambase into a big C structure
+ *
+ * Usage: mkjambase jambase.c Jambase ...
+ *
+ * Results look like this:
+ *
+ * char *jambase[] = {
+ * "...\n",
+ * ...
+ * 0 };
+ *
+ * Handles \'s and "'s specially; knows to delete blank and comment lines.
+ *
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+
+int main( int argc, char * * argv, char * * envp )
+{
+ char buf[ 1024 ];
+ FILE * fin;
+ FILE * fout;
+ char * p;
+ int doDotC = 0;
+
+ if ( argc < 3 )
+ {
+ fprintf( stderr, "usage: %s jambase.c Jambase ...\n", argv[ 0 ] );
+ return -1;
+ }
+
+ if ( !( fout = fopen( argv[1], "w" ) ) )
+ {
+ perror( argv[ 1 ] );
+ return -1;
+ }
+
+ /* If the file ends in .c generate a C source file. */
+ if ( ( p = strrchr( argv[1], '.' ) ) && !strcmp( p, ".c" ) )
+ doDotC++;
+
+ /* Now process the files. */
+
+ argc -= 2;
+ argv += 2;
+
+ if ( doDotC )
+ {
+ fprintf( fout, "/* Generated by mkjambase from Jambase */\n" );
+ fprintf( fout, "char *jambase[] = {\n" );
+ }
+
+ for ( ; argc--; ++argv )
+ {
+ if ( !( fin = fopen( *argv, "r" ) ) )
+ {
+ perror( *argv );
+ return -1;
+ }
+
+ if ( doDotC )
+ fprintf( fout, "/* %s */\n", *argv );
+ else
+ fprintf( fout, "### %s ###\n", *argv );
+
+ while ( fgets( buf, sizeof( buf ), fin ) )
+ {
+ if ( doDotC )
+ {
+ char * p = buf;
+
+ /* Strip leading whitespace. */
+ while ( ( *p == ' ' ) || ( *p == '\t' ) || ( *p == '\n' ) )
+ ++p;
+
+ /* Drop comments and empty lines. */
+ if ( ( *p == '#' ) || !*p )
+ continue;
+
+ /* Copy. */
+ putc( '"', fout );
+ for ( ; *p && ( *p != '\n' ); ++p )
+ switch ( *p )
+ {
+ case '\\': putc( '\\', fout ); putc( '\\', fout ); break;
+ case '"' : putc( '\\', fout ); putc( '"' , fout ); break;
+ case '\r': break;
+ default: putc( *p, fout ); break;
+ }
+
+ fprintf( fout, "\\n\",\n" );
+ }
+ else
+ {
+ fprintf( fout, "%s", buf );
+ }
+ }
+
+ fclose( fin );
+ }
+
+ if ( doDotC )
+ fprintf( fout, "0 };\n" );
+
+ fclose( fout );
+
+ return 0;
+}
diff --git a/jam-files/engine/modules.c b/jam-files/engine/modules.c
new file mode 100644
index 000000000..729525941
--- /dev/null
+++ b/jam-files/engine/modules.c
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+#include "jam.h"
+
+#include "modules.h"
+#include "string.h"
+#include "hash.h"
+#include "newstr.h"
+#include "lists.h"
+#include "parse.h"
+#include "rules.h"
+#include "variable.h"
+#include "strings.h"
+#include <assert.h>
+
+static struct hash * module_hash = 0;
+
+
+static char * new_module_str( module_t * m, char * suffix )
+{
+ char * result;
+ string s;
+ string_copy( &s, m->name );
+ string_append( &s, suffix );
+ result = newstr( s.value );
+ string_free( &s );
+ return result;
+}
+
+
+module_t * bindmodule( char * name )
+{
+ PROFILE_ENTER( BINDMODULE );
+
+ string s;
+ module_t m_;
+ module_t * m = &m_;
+
+ if ( !module_hash )
+ module_hash = hashinit( sizeof( module_t ), "modules" );
+
+ string_new( &s );
+ if ( name )
+ {
+ string_append( &s, name );
+ string_push_back( &s, '.' );
+ }
+
+ m->name = s.value;
+
+ if ( hashenter( module_hash, (HASHDATA * *)&m ) )
+ {
+ m->name = newstr( m->name );
+ m->variables = 0;
+ m->rules = 0;
+ m->imported_modules = 0;
+ m->class_module = 0;
+ m->native_rules = 0;
+ m->user_module = 0;
+ }
+ string_free( &s );
+
+ PROFILE_EXIT( BINDMODULE );
+
+ return m;
+}
+
+/*
+ * demand_rules() - Get the module's "rules" hash on demand.
+ */
+struct hash * demand_rules( module_t * m )
+{
+ if ( !m->rules )
+ m->rules = hashinit( sizeof( RULE ), new_module_str( m, "rules" ) );
+ return m->rules;
+}
+
+
+/*
+ * delete_module() - wipe out the module's rules and variables.
+ */
+
+static void delete_rule_( void * xrule, void * data )
+{
+ rule_free( (RULE *)xrule );
+}
+
+
+void delete_module( module_t * m )
+{
+ /* Clear out all the rules. */
+ if ( m->rules )
+ {
+ hashenumerate( m->rules, delete_rule_, (void *)0 );
+ hashdone( m->rules );
+ m->rules = 0;
+ }
+
+ if ( m->variables )
+ {
+ var_hash_swap( &m->variables );
+ var_done();
+ var_hash_swap( &m->variables );
+ m->variables = 0;
+ }
+}
+
+
+module_t * root_module()
+{
+ static module_t * root = 0;
+ if ( !root )
+ root = bindmodule( 0 );
+ return root;
+}
+
+void enter_module( module_t * m )
+{
+ var_hash_swap( &m->variables );
+}
+
+
+void exit_module( module_t * m )
+{
+ var_hash_swap( &m->variables );
+}
+
+
+void import_module( LIST * module_names, module_t * target_module )
+{
+ PROFILE_ENTER( IMPORT_MODULE );
+
+ struct hash * h;
+
+ if ( !target_module->imported_modules )
+ target_module->imported_modules = hashinit( sizeof( char * ), "imported" );
+ h = target_module->imported_modules;
+
+ for ( ; module_names; module_names = module_names->next )
+ {
+ char * s = module_names->string;
+ char * * ss = &s;
+ hashenter( h, (HASHDATA * *)&ss );
+ }
+
+ PROFILE_EXIT( IMPORT_MODULE );
+}
+
+
+static void add_module_name( void * r_, void * result_ )
+{
+ char * * r = (char * *)r_;
+ LIST * * result = (LIST * *)result_;
+
+ *result = list_new( *result, copystr( *r ) );
+}
+
+
+LIST * imported_modules( module_t * module )
+{
+ LIST * result = L0;
+ if ( module->imported_modules )
+ hashenumerate( module->imported_modules, add_module_name, &result );
+ return result;
+}
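
A short sketch (illustrative only, not part of the engine) of the intended use of this API: bind a module by name, then bracket any variable access with enter_module()/exit_module() so that the module's own variable hash is swapped in and back out:

    #include "jam.h"
    #include "lists.h"
    #include "newstr.h"
    #include "variable.h"
    #include "modules.h"

    /* Set and read a variable inside a hypothetical "doc" module. */
    static void poke_doc_module( void )
    {
        module_t * m = bindmodule( "doc" );     /* created on first use */

        enter_module( m );                      /* swap in m->variables */
        var_set( "GREETING", list_new( L0, newstr( "hello" ) ), VAR_SET );
        list_print( var_get( "GREETING" ) );
        exit_module( m );                       /* swap the globals back in */
    }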
diff --git a/jam-files/engine/modules.h b/jam-files/engine/modules.h
new file mode 100644
index 000000000..60053a239
--- /dev/null
+++ b/jam-files/engine/modules.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+#ifndef MODULES_DWA10182001_H
+# define MODULES_DWA10182001_H
+
+#include "lists.h"
+
+struct module_t
+{
+ char* name;
+ struct hash* rules;
+ struct hash* variables;
+ struct hash* imported_modules;
+ struct module_t* class_module;
+ struct hash* native_rules;
+ int user_module;
+};
+
+typedef struct module_t module_t ; /* MSVC debugger gets confused unless this is provided */
+
+module_t* bindmodule( char* name );
+module_t* root_module();
+void enter_module( module_t* );
+void exit_module( module_t* );
+void delete_module( module_t* );
+
+void import_module(LIST* module_names, module_t* target_module);
+LIST* imported_modules(module_t* module);
+
+struct hash* demand_rules( module_t* );
+
+
+#endif
+
diff --git a/jam-files/engine/modules/order.c b/jam-files/engine/modules/order.c
new file mode 100644
index 000000000..d77943a79
--- /dev/null
+++ b/jam-files/engine/modules/order.c
@@ -0,0 +1,144 @@
+/* Copyright Vladimir Prus 2004. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+#include "../lists.h"
+#include "../strings.h"
+#include "../newstr.h"
+#include "../variable.h"
+
+
+/* Use a quite kludgy approach: when we add an order dependency from 'a' to 'b',
+ just append 'b' to the value of variable 'a'.
+*/
+LIST *add_pair( PARSE *parse, FRAME *frame )
+{
+ LIST* arg = lol_get( frame->args, 0 );
+
+ var_set(arg->string, list_copy(0, arg->next), VAR_APPEND);
+
+ return L0;
+}
+
+/** Given a list and a value, returns the position of that value in
+ the list, or -1 if not found.
+*/
+int list_index(LIST* list, const char* value)
+{
+ int result = 0;
+ for(; list; list = list->next, ++result) {
+ if (strcmp(list->string, value) == 0)
+ return result;
+ }
+ return -1;
+}
+
+enum colors { white, gray, black };
+
+/* Main routine of the topological sort. Calls itself recursively on all
+ adjacent vertices that have not yet been visited. After that, 'current_vertex'
+ is added to '*result_ptr'.
+*/
+void do_ts(int** graph, int current_vertex, int* colors, int** result_ptr)
+{
+ int i;
+
+ colors[current_vertex] = gray;
+ for(i = 0; graph[current_vertex][i] != -1; ++i) {
+ int adjacent_vertex = graph[current_vertex][i];
+
+ if (colors[adjacent_vertex] == white)
+ do_ts(graph, adjacent_vertex, colors, result_ptr);
+ /* The vertex is either black, in which case we don't have to do
+ anything, or gray, in which case we have a loop. If we have a loop,
+ it's not clear what useful diagnostic we can emit, so we emit
+ nothing. */
+ }
+ colors[current_vertex] = black;
+ **result_ptr = current_vertex;
+ (*result_ptr)++;
+}
+
+void topological_sort(int** graph, int num_vertices, int* result)
+{
+ int i;
+ int* colors = (int*)BJAM_CALLOC(num_vertices, sizeof(int));
+ for (i = 0; i < num_vertices; ++i)
+ colors[i] = white;
+
+ for(i = 0; i < num_vertices; ++i)
+ if (colors[i] == white)
+ do_ts(graph, i, colors, &result);
+
+ BJAM_FREE(colors);
+}
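
The adjacency representation expected here is easy to get wrong: graph[i] is a -1 terminated array of the vertices that vertex i points at, and the result comes out in DFS post-order, so a vertex appears only after everything it points at. A minimal standalone check (illustrative only; it assumes do_ts() and topological_sort() above are linked in):

    #include <stdio.h>

    void topological_sort( int * * graph, int num_vertices, int * result );

    int main( void )
    {
        /* Three vertices: 0 -> 1 -> 2, each row terminated by -1. */
        int row0[] = { 1, -1 };
        int row1[] = { 2, -1 };
        int row2[] = { -1 };
        int * graph[] = { row0, row1, row2 };
        int result[ 3 ];
        int i;

        topological_sort( graph, 3, result );

        /* Post-order: prints "2 1 0". */
        for ( i = 0; i < 3; ++i )
            printf( "%d ", result[ i ] );
        printf( "\n" );
        return 0;
    }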
+
+LIST *order( PARSE *parse, FRAME *frame )
+{
+ LIST* arg = lol_get( frame->args, 0 );
+ LIST* tmp;
+ LIST* result = 0;
+ int src;
+
+ /* We need to create a graph of order dependencies between
+ the passed objects. We assume that there are no duplicates
+ passed to 'add_pair'.
+ */
+ int length = list_length(arg);
+ int** graph = (int**)BJAM_CALLOC(length, sizeof(int*));
+ int* order = (int*)BJAM_MALLOC((length+1)*sizeof(int));
+
+ for(tmp = arg, src = 0; tmp; tmp = tmp->next, ++src) {
+ /* For all objects this one depends upon, add elements
+ to 'graph'. */
+ LIST* dependencies = var_get(tmp->string);
+ int index = 0;
+
+ graph[src] = (int*)BJAM_CALLOC(list_length(dependencies)+1, sizeof(int));
+ for(; dependencies; dependencies = dependencies->next) {
+ int dst = list_index(arg, dependencies->string);
+ if (dst != -1)
+ graph[src][index++] = dst;
+ }
+ graph[src][index] = -1;
+ }
+
+ topological_sort(graph, length, order);
+
+ {
+ int index = length-1;
+ for(; index >= 0; --index) {
+ int i;
+ tmp = arg;
+ for (i = 0; i < order[index]; ++i, tmp = tmp->next);
+ result = list_new(result, tmp->string);
+ }
+ }
+
+ /* Clean up */
+ {
+ int i;
+ for(i = 0; i < length; ++i)
+ BJAM_FREE(graph[i]);
+ BJAM_FREE(graph);
+ BJAM_FREE(order);
+ }
+
+ return result;
+}
+
+void init_order()
+{
+ {
+ char* args[] = { "first", "second", 0 };
+ declare_native_rule("class@order", "add-pair", args, add_pair, 1);
+ }
+
+ {
+ char* args[] = { "objects", "*", 0 };
+ declare_native_rule("class@order", "order", args, order, 1);
+ }
+
+
+}
diff --git a/jam-files/engine/modules/path.c b/jam-files/engine/modules/path.c
new file mode 100644
index 000000000..f5d096224
--- /dev/null
+++ b/jam-files/engine/modules/path.c
@@ -0,0 +1,32 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+#include "../timestamp.h"
+#include "../newstr.h"
+
+LIST *path_exists( PARSE *parse, FRAME *frame )
+{
+ LIST* l = lol_get( frame->args, 0 );
+
+ time_t time;
+ timestamp(l->string, &time);
+ if (time != 0)
+ {
+ return list_new(0, newstr("true"));
+ }
+ else
+ {
+ return L0;
+ }
+}
+
+void init_path()
+{
+ {
+ char* args[] = { "location", 0 };
+ declare_native_rule("path", "exists", args, path_exists, 1);
+ }
+
+}
diff --git a/jam-files/engine/modules/property-set.c b/jam-files/engine/modules/property-set.c
new file mode 100644
index 000000000..2b0fb5d97
--- /dev/null
+++ b/jam-files/engine/modules/property-set.c
@@ -0,0 +1,110 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+#include "../timestamp.h"
+#include "../newstr.h"
+#include "../strings.h"
+#include "../lists.h"
+#include "../variable.h"
+#include "../compile.h"
+
+LIST* get_grist(char* f)
+{
+ char* end = strchr(f, '>');
+ string s[1];
+ LIST* result;
+
+ string_new(s);
+
+ string_append_range(s, f, end+1);
+ result = list_new(0, newstr(s->value));
+
+ string_free(s);
+ return result;
+}
+
+/*
+rule create ( raw-properties * )
+{
+ raw-properties = [ sequence.unique
+ [ sequence.insertion-sort $(raw-properties) ] ] ;
+
+ local key = $(raw-properties:J=-:E=) ;
+
+ if ! $(.ps.$(key))
+ {
+ .ps.$(key) = [ new property-set $(raw-properties) ] ;
+ }
+ return $(.ps.$(key)) ;
+}
+*/
+
+LIST *property_set_create( PARSE *parse, FRAME *frame )
+{
+ LIST* properties = lol_get( frame->args, 0 );
+ LIST* sorted = 0;
+#if 0
+ LIST* order_sensitive = 0;
+#endif
+ LIST* unique;
+ LIST* tmp;
+ LIST* val;
+ string var[1];
+
+#if 0
+ /* Sort all properties which are not order sensitive */
+ for(tmp = properties; tmp; tmp = tmp->next) {
+ LIST* g = get_grist(tmp->string);
+ LIST* att = call_rule("feature.attributes", frame, g, 0);
+ if (list_in(att, "order-sensitive")) {
+ order_sensitive = list_new( order_sensitive, tmp->string);
+ } else {
+ sorted = list_new( sorted, tmp->string);
+ }
+ list_free(att);
+ }
+
+ sorted = list_sort(sorted);
+ sorted = list_append(sorted, order_sensitive);
+ unique = list_unique(sorted);
+#endif
+ sorted = list_sort(properties);
+ unique = list_unique(sorted);
+
+ string_new(var);
+ string_append(var, ".ps.");
+
+ for(tmp = unique; tmp; tmp = tmp->next) {
+ string_append(var, tmp->string);
+ string_push_back(var, '-');
+ }
+ val = var_get(var->value);
+ if (val == 0)
+ {
+ val = call_rule("new", frame,
+ list_append(list_new(0, "property-set"), unique), 0);
+
+ var_set(newstr(var->value), list_copy(0, val), VAR_SET);
+ }
+ else
+ {
+ val = list_copy(0, val);
+ }
+
+ string_free(var);
+ /* The 'unique' variable is freed in 'call_rule'. */
+ list_free(sorted);
+
+ return val;
+
+}
+
+void init_property_set()
+{
+ {
+ char* args[] = { "raw-properties", "*", 0 };
+ declare_native_rule("property-set", "create", args, property_set_create, 1);
+ }
+}
diff --git a/jam-files/engine/modules/readme.txt b/jam-files/engine/modules/readme.txt
new file mode 100644
index 000000000..2edf6e17f
--- /dev/null
+++ b/jam-files/engine/modules/readme.txt
@@ -0,0 +1,3 @@
+
+This directory contains sources which declare native
+rules for Boost.Build modules.
\ No newline at end of file
diff --git a/jam-files/engine/modules/regex.c b/jam-files/engine/modules/regex.c
new file mode 100644
index 000000000..d048ba1de
--- /dev/null
+++ b/jam-files/engine/modules/regex.c
@@ -0,0 +1,96 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+#include "../timestamp.h"
+#include "../newstr.h"
+#include "../strings.h"
+#include "../regexp.h"
+#include "../compile.h"
+
+/*
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+*/
+LIST *regex_transform( PARSE *parse, FRAME *frame )
+{
+ LIST* l = lol_get( frame->args, 0 );
+ LIST* pattern = lol_get( frame->args, 1 );
+ LIST* indices_list = lol_get(frame->args, 2);
+ int* indices = 0;
+ int size;
+ int* p;
+ LIST* result = 0;
+
+ string buf[1];
+ string_new(buf);
+
+ if (indices_list)
+ {
+ size = list_length(indices_list);
+ indices = (int*)BJAM_MALLOC(size*sizeof(int));
+ for(p = indices; indices_list; indices_list = indices_list->next)
+ {
+ *p++ = atoi(indices_list->string);
+ }
+ }
+ else
+ {
+ size = 1;
+ indices = (int*)BJAM_MALLOC(sizeof(int));
+ *indices = 1;
+ }
+
+ {
+ /* Result is cached and intentionally never freed */
+ regexp *re = regex_compile( pattern->string );
+
+ for(; l; l = l->next)
+ {
+ if( regexec( re, l->string ) )
+ {
+ int i = 0;
+ for(; i < size; ++i)
+ {
+ int index = indices[i];
+ /* Skip empty submatches. Not sure it's right in all cases,
+ but it surely is right for the case for which this routine
+ is optimized -- header scanning.
+ */
+ if (re->startp[index] != re->endp[index])
+ {
+ string_append_range( buf, re->startp[index], re->endp[index] );
+ result = list_new( result, newstr( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ }
+ string_free( buf );
+ }
+
+ BJAM_FREE(indices);
+
+ return result;
+}
+
+void init_regex()
+{
+ {
+ char* args[] = { "list", "*", ":", "pattern", ":", "indices", "*", 0 };
+ declare_native_rule("regex", "transform", args, regex_transform, 2);
+ }
+}
diff --git a/jam-files/engine/modules/sequence.c b/jam-files/engine/modules/sequence.c
new file mode 100644
index 000000000..bda80d94c
--- /dev/null
+++ b/jam-files/engine/modules/sequence.c
@@ -0,0 +1,42 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+
+# ifndef max
+# define max( a,b ) ((a)>(b)?(a):(b))
+# endif
+
+
+LIST *sequence_select_highest_ranked( PARSE *parse, FRAME *frame )
+{
+ /* Returns all of 'elements' for which the corresponding element in the */
+ /* parallel list 'rank' is equal to the maximum value in 'rank'. */
+
+ LIST* elements = lol_get( frame->args, 0 );
+ LIST* rank = lol_get( frame->args, 1 );
+
+ LIST* result = 0;
+ LIST* tmp;
+ int highest_rank = -1;
+
+ for (tmp = rank; tmp; tmp = tmp->next)
+ highest_rank = max(highest_rank, atoi(tmp->string));
+
+ for (; rank; rank = rank->next, elements = elements->next)
+ if (atoi(rank->string) == highest_rank)
+ result = list_new(result, elements->string);
+
+ return result;
+}
+
+void init_sequence()
+{
+ {
+ char* args[] = { "elements", "*", ":", "rank", "*", 0 };
+ declare_native_rule("sequence", "select-highest-ranked", args,
+ sequence_select_highest_ranked, 1);
+ }
+
+}
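
As a rough illustration (not part of the patch): the select-highest-ranked rule above makes two passes over the parallel 'rank' list, one to find the maximum rank and one to copy out the elements whose rank matches it. The standalone C sketch below shows the same two-pass selection over plain arrays; the function name and demo data are hypothetical and do not depend on the engine's LIST type.

    #include <stdio.h>

    /* Copy into 'out' every element whose parallel rank equals the maximum
       rank, preserving order. Returns the number of elements written. */
    static int pick_highest_ranked( const char ** elements, const int * rank,
                                    int n, const char ** out )
    {
        int i, max = -1, count = 0;
        for ( i = 0; i < n; ++i )
            if ( rank[ i ] > max )
                max = rank[ i ];
        for ( i = 0; i < n; ++i )
            if ( rank[ i ] == max )
                out[ count++ ] = elements[ i ];
        return count;
    }

    int main()
    {
        const char * elements[] = { "gcc", "clang", "msvc" };
        int rank[] = { 2, 2, 1 };
        const char * picked[ 3 ];
        int i, n = pick_highest_ranked( elements, rank, 3, picked );
        for ( i = 0; i < n; ++i )
            printf( "%s\n", picked[ i ] );   /* prints gcc and clang */
        return 0;
    }
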
diff --git a/jam-files/engine/modules/set.c b/jam-files/engine/modules/set.c
new file mode 100644
index 000000000..f8219403c
--- /dev/null
+++ b/jam-files/engine/modules/set.c
@@ -0,0 +1,41 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+
+/*
+ local result = ;
+ local element ;
+ for element in $(B)
+ {
+ if ! ( $(element) in $(A) )
+ {
+ result += $(element) ;
+ }
+ }
+ return $(result) ;
+*/
+LIST *set_difference( PARSE *parse, FRAME *frame )
+{
+
+ LIST* b = lol_get( frame->args, 0 );
+ LIST* a = lol_get( frame->args, 1 );
+
+ LIST* result = 0;
+ for(; b; b = b->next)
+ {
+ if (!list_in(a, b->string))
+ result = list_new(result, b->string);
+ }
+ return result;
+}
+
+void init_set()
+{
+ {
+ char* args[] = { "B", "*", ":", "A", "*", 0 };
+ declare_native_rule("set", "difference", args, set_difference, 1);
+ }
+
+}
diff --git a/jam-files/engine/native.c b/jam-files/engine/native.c
new file mode 100644
index 000000000..4c2899595
--- /dev/null
+++ b/jam-files/engine/native.c
@@ -0,0 +1,36 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "native.h"
+#include "hash.h"
+
+# define P0 (PARSE *)0
+# define C0 (char *)0
+
+
+void declare_native_rule(char* module, char* rule, char** args,
+ LIST*(*f)(PARSE*, FRAME*), int version)
+{
+ module_t* m = bindmodule(module);
+ if (m->native_rules == 0) {
+ m->native_rules = hashinit( sizeof( native_rule_t ), "native rules");
+ }
+
+ {
+ native_rule_t n, *np = &n;
+ n.name = rule;
+ if (args)
+ {
+ n.arguments = args_new();
+ lol_build( n.arguments->data, args );
+ }
+ else
+ {
+ n.arguments = 0;
+ }
+ n.procedure = parse_make( f, P0, P0, P0, C0, C0, 0 );
+ n.version = version;
+ hashenter(m->native_rules, (HASHDATA**)&np);
+ }
+}
diff --git a/jam-files/engine/native.h b/jam-files/engine/native.h
new file mode 100644
index 000000000..3fc710b9c
--- /dev/null
+++ b/jam-files/engine/native.h
@@ -0,0 +1,34 @@
+/* Copyright David Abrahams 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#ifndef NATIVE_H_VP_2003_12_09
+#define NATIVE_H_VP_2003_12_09
+
+#include "rules.h"
+
+struct native_rule_t
+{
+ char* name;
+ argument_list* arguments;
+ PARSE* procedure;
+ /* Version of the interface that the native rule provides.
+ It's possible that we will want to change the set of parameters
+ for an existing native rule. In that case, the version number
+ should be incremented so that Boost.Build can check for the
+ version it relies on.
+
+ Versions are numbered from 1.
+ */
+ int version;
+};
+
+/* MSVC debugger gets confused unless this is provided */
+typedef struct native_rule_t native_rule_t ;
+
+void declare_native_rule(char* module, char* rule, char** args,
+ LIST*(*f)(PARSE*, FRAME*), int version);
+
+
+
+#endif
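
To make the declaration above concrete, here is a rough sketch (not part of the patch) of how a native rule is typically wired up, following the same pattern as the modules earlier in this diff (set.c, sequence.c). The module name "greet-module", the rule "greet" and init_greet() are made up for illustration, and the sketch assumes it is compiled and linked together with the engine sources added here.

    #include "native.h"
    #include "lists.h"

    /* A native rule receives its arguments through the frame's argument
       lists and returns a LIST (possibly empty). */
    static LIST * greet( PARSE * parse, FRAME * frame )
    {
        LIST * names = lol_get( frame->args, 0 );
        LIST * result = 0;
        for ( ; names; names = names->next )
            result = list_new( result, names->string );
        return result;
    }

    void init_greet()
    {
        /* Argument specification in the same form the engine's modules use. */
        char * args[] = { "names", "*", 0 };
        /* Version 1 of the interface, per the 'version' field documented above. */
        declare_native_rule( "greet-module", "greet", args, greet, 1 );
    }
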
diff --git a/jam-files/engine/newstr.c b/jam-files/engine/newstr.c
new file mode 100644
index 000000000..6a229eb21
--- /dev/null
+++ b/jam-files/engine/newstr.c
@@ -0,0 +1,174 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "newstr.h"
+# include "hash.h"
+# include "compile.h"
+# include <stddef.h>
+# include <stdlib.h>
+
+/*
+ * newstr.c - string manipulation routines
+ *
+ * To minimize string copying, string creation, copying, and freeing
+ * are done through newstr.
+ *
+ * External functions:
+ *
+ * newstr() - return a dynamically allocated copy of a string
+ * copystr() - return a copy of a string previously returned by newstr()
+ * freestr() - free a string returned by newstr() or copystr()
+ * str_done() - free string tables
+ *
+ * Once a string is passed to newstr(), the returned string is readonly.
+ *
+ * This implementation builds a hash table of all strings, so that multiple
+ * calls of newstr() on the same string allocate memory for the string once.
+ * Strings are never actually freed.
+ */
+
+typedef char * STRING;
+
+static struct hash * strhash = 0;
+static int strtotal = 0;
+static int strcount_in = 0;
+static int strcount_out = 0;
+
+
+/*
+ * Immortal string allocator implementation speeds string allocation and cuts
+ * down on internal fragmentation.
+ */
+
+# define STRING_BLOCK 4096
+typedef struct strblock
+{
+ struct strblock * next;
+ char data[STRING_BLOCK];
+} strblock;
+
+static strblock * strblock_chain = 0;
+
+/* Storage remaining in the current strblock */
+static char * storage_start = 0;
+static char * storage_finish = 0;
+
+
+/*
+ * allocate() - Allocate n bytes of immortal string storage.
+ */
+
+static char * allocate( size_t const n )
+{
+#ifdef BJAM_NEWSTR_NO_ALLOCATE
+ return (char*)BJAM_MALLOC_ATOMIC(n);
+#else
+ /* See if we can grab storage from an existing block. */
+ size_t remaining = storage_finish - storage_start;
+ if ( remaining >= n )
+ {
+ char * result = storage_start;
+ storage_start += n;
+ return result;
+ }
+ else /* Must allocate a new block. */
+ {
+ strblock * new_block;
+ size_t nalloc = n;
+ if ( nalloc < STRING_BLOCK )
+ nalloc = STRING_BLOCK;
+
+ /* Allocate a new block and link into the chain. */
+ new_block = (strblock *)BJAM_MALLOC( offsetof( strblock, data[0] ) + nalloc * sizeof( new_block->data[0] ) );
+ if ( new_block == 0 )
+ return 0;
+ new_block->next = strblock_chain;
+ strblock_chain = new_block;
+
+ /* Take future allocations out of the larger remaining space. */
+ if ( remaining < nalloc - n )
+ {
+ storage_start = new_block->data + n;
+ storage_finish = new_block->data + nalloc;
+ }
+ return new_block->data;
+ }
+#endif
+}
+
+
+/*
+ * newstr() - return a dynamically allocated copy of a string.
+ */
+
+char * newstr( char * string )
+{
+ STRING str;
+ STRING * s = &str;
+
+ if ( !strhash )
+ strhash = hashinit( sizeof( STRING ), "strings" );
+
+ *s = string;
+
+ if ( hashenter( strhash, (HASHDATA **)&s ) )
+ {
+ int l = strlen( string );
+ char * m = (char *)allocate( l + 1 );
+
+ strtotal += l + 1;
+ memcpy( m, string, l + 1 );
+ *s = m;
+ }
+
+ strcount_in += 1;
+ return *s;
+}
+
+
+/*
+ * copystr() - return a copy of a string previously returned by newstr()
+ */
+
+char * copystr( char * s )
+{
+ strcount_in += 1;
+ return s;
+}
+
+
+/*
+ * freestr() - free a string returned by newstr() or copystr()
+ */
+
+void freestr( char * s )
+{
+ strcount_out += 1;
+}
+
+
+/*
+ * str_done() - free string tables.
+ */
+
+void str_done()
+{
+ /* Reclaim string blocks. */
+ while ( strblock_chain != 0 )
+ {
+ strblock * n = strblock_chain->next;
+ BJAM_FREE(strblock_chain);
+ strblock_chain = n;
+ }
+
+ hashdone( strhash );
+
+ if ( DEBUG_MEM )
+ printf( "%dK in strings\n", strtotal / 1024 );
+
+ /* printf( "--- %d strings of %d dangling\n", strcount_in-strcount_out, strcount_in ); */
+}
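
A small sketch (not part of the patch) of what the interning contract described above means for callers: equal text handed to newstr() comes back as the same pointer, and freestr() only adjusts counters while the storage stays live until str_done(). The main() below is illustrative only and assumes linking against newstr.c and hash.c from this diff.

    #include <assert.h>
    #include <stdio.h>
    #include "newstr.h"

    int main()
    {
        char * a = newstr( "hello" );
        char * b = newstr( "hello" );

        /* Equal text maps to the same immortal storage, so once both strings
           have been through newstr() a pointer comparison is enough. */
        assert( a == b );

        freestr( a );            /* only bumps a counter ... */
        printf( "%s\n", b );     /* ... the characters live on until str_done() */

        str_done();
        return 0;
    }
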
diff --git a/jam-files/engine/newstr.h b/jam-files/engine/newstr.h
new file mode 100644
index 000000000..84a4d7b6d
--- /dev/null
+++ b/jam-files/engine/newstr.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * newstr.h - string manipulation routines
+ */
+
+char * copystr ( char * );
+void freestr ( char * );
+char * newstr ( char * );
+void str_done();
diff --git a/jam-files/engine/option.c b/jam-files/engine/option.c
new file mode 100644
index 000000000..d25e5e8ad
--- /dev/null
+++ b/jam-files/engine/option.c
@@ -0,0 +1,94 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "option.h"
+
+/*
+ * option.c - command line option processing
+ *
+ * {o >o
+ * \<>) "Process command line options as defined in <option.h>.
+ * Return the number of argv[] elements used up by options,
+ * or -1 if an invalid option flag was given or an argument
+ * was supplied for an option that does not require one."
+ */
+
+int getoptions( int argc, char * * argv, char * opts, bjam_option * optv )
+{
+ int i;
+ int optc = N_OPTS;
+
+ memset( (char *)optv, '\0', sizeof( *optv ) * N_OPTS );
+
+ for ( i = 0; i < argc; ++i )
+ {
+ char *arg;
+
+ if ( ( argv[ i ][ 0 ] != '-' ) ||
+ ( ( argv[ i ][ 1 ] != '-' ) && !isalpha( argv[ i ][ 1 ] ) ) )
+ continue;
+
+ if ( !optc-- )
+ {
+ printf( "too many options (%d max)\n", N_OPTS );
+ return -1;
+ }
+
+ for ( arg = &argv[ i ][ 1 ]; *arg; ++arg )
+ {
+ char * f;
+
+ for ( f = opts; *f; ++f )
+ if ( *f == *arg )
+ break;
+
+ if ( !*f )
+ {
+ printf( "Invalid option: -%c\n", *arg );
+ return -1;
+ }
+
+ optv->flag = *f;
+
+ if ( f[ 1 ] != ':' )
+ {
+ optv++->val = "true";
+ }
+ else if ( arg[ 1 ] )
+ {
+ optv++->val = &arg[1];
+ break;
+ }
+ else if ( ++i < argc )
+ {
+ optv++->val = argv[ i ];
+ break;
+ }
+ else
+ {
+ printf( "option: -%c needs argument\n", *f );
+ return -1;
+ }
+ }
+ }
+
+ return i;
+}
+
+
+/*
+ * Name: getoptval() - find an option given its character.
+ */
+
+char * getoptval( bjam_option * optv, char opt, int subopt )
+{
+ int i;
+ for ( i = 0; i < N_OPTS; ++i, ++optv )
+ if ( ( optv->flag == opt ) && !subopt-- )
+ return optv->val;
+ return 0;
+}
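
As a rough usage sketch (not part of the patch) for getoptions() and getoptval(): the option string lists the accepted flags, a trailing ':' marks flags that take a value, and getoptval() retrieves the value (or "true" for plain flags) afterwards. The option string "d:v" and the printed messages below are made up for illustration, and the sketch assumes linking against option.c from this diff; the engine itself drives these functions from jam.c with its real option set.

    #include <stdio.h>
    #include "option.h"

    int main( int argc, char * * argv )
    {
        bjam_option optv[ N_OPTS ];
        char * value;

        /* "d:" declares -d as taking an argument, "v" as a plain flag. */
        if ( getoptions( argc - 1, argv + 1, "d:v", optv ) < 0 )
            return 1;

        if ( ( value = getoptval( optv, 'd', 0 ) ) )
            printf( "-d given with argument %s\n", value );
        if ( getoptval( optv, 'v', 0 ) )
            printf( "-v given\n" );

        return 0;
    }
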
diff --git a/jam-files/engine/option.h b/jam-files/engine/option.h
new file mode 100644
index 000000000..99ef620da
--- /dev/null
+++ b/jam-files/engine/option.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * option.h - command line option processing
+ *
+ * {o >o
+ * \ -) "Command line option."
+ */
+
+typedef struct bjam_option
+{
+ char flag; /* filled in by getoptions() */
+ char *val; /* set to random address if true */
+} bjam_option;
+
+# define N_OPTS 256
+
+int getoptions( int argc, char **argv, char *opts, bjam_option *optv );
+char * getoptval( bjam_option *optv, char opt, int subopt );
diff --git a/jam-files/engine/output.c b/jam-files/engine/output.c
new file mode 100644
index 000000000..483c6ca9e
--- /dev/null
+++ b/jam-files/engine/output.c
@@ -0,0 +1,125 @@
+/*
+ Copyright 2007 Rene Rivera
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include "jam.h"
+#include "output.h"
+#include "newstr.h"
+#include <stdio.h>
+
+#define bjam_out (stdout)
+#define bjam_err (stderr)
+
+static void out_
+(
+ char const * data,
+ FILE * io
+)
+{
+ while ( *data )
+ {
+ size_t len = strcspn(data,"\r");
+ data += fwrite(data,1,len,io);
+ if ( *data == '\r' ) ++data;
+ }
+}
+
+
+void out_action
+(
+ char const * action,
+ char const * target,
+ char const * command,
+ char const * out_data,
+ char const * err_data,
+ int exit_reason
+)
+{
+ /* Print out the action+target line. For quiet actions the caller
+ * passes a null action, so nothing is printed.
+ */
+ if ( action )
+ {
+ fprintf( bjam_out, "%s %s\n", action, target );
+ }
+
+ /* Print out the command executed if given -d+2. */
+ if ( DEBUG_EXEC )
+ {
+ fputs( command, bjam_out );
+ fputc( '\n', bjam_out );
+ }
+
+ /* Print out the command executed to the command stream. */
+ if ( globs.cmdout )
+ {
+ fputs( command, globs.cmdout );
+ }
+
+ switch ( exit_reason )
+ {
+ case EXIT_OK:
+ break;
+ case EXIT_FAIL:
+ break;
+ case EXIT_TIMEOUT:
+ {
+ /* The process timed out; make the user aware with an explicit message. */
+ if ( action )
+ {
+ /* But only print it for non-quiet actions. */
+ fprintf( bjam_out, "%ld second time limit exceeded\n", globs.timeout );
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ /* Print out the command output, if requested, or if the program failed. */
+ if ( action || exit_reason != EXIT_OK)
+ {
+ /* But only print output for non-quiet actions. */
+ if ( ( 0 != out_data ) &&
+ ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) ||
+ ( globs.pipe_action == 0 ) ) )
+ {
+ out_( out_data, bjam_out );
+ }
+ if ( ( 0 != err_data ) &&
+ ( globs.pipe_action & 2 /* STDERR_FILENO */ ) )
+ {
+ out_( err_data, bjam_err );
+ }
+ }
+
+ fflush( bjam_out );
+ fflush( bjam_err );
+ fflush( globs.cmdout );
+}
+
+
+char * outf_int( int value )
+{
+ char buffer[50];
+ sprintf( buffer, "%i", value );
+ return newstr( buffer );
+}
+
+
+char * outf_double( double value )
+{
+ char buffer[50];
+ sprintf( buffer, "%f", value );
+ return newstr( buffer );
+}
+
+
+char * outf_time( time_t value )
+{
+ char buffer[50];
+ strftime( buffer, 49, "%Y-%m-%d %H:%M:%SZ", gmtime( &value ) );
+ return newstr( buffer );
+}
diff --git a/jam-files/engine/output.h b/jam-files/engine/output.h
new file mode 100644
index 000000000..9e9876cfc
--- /dev/null
+++ b/jam-files/engine/output.h
@@ -0,0 +1,29 @@
+/*
+ Copyright 2007 Rene Rivera
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifndef BJAM_OUTPUT_H
+#define BJAM_OUTPUT_H
+
+#include <time.h>
+
+#define EXIT_OK 0
+#define EXIT_FAIL 1
+#define EXIT_TIMEOUT 2
+
+void out_action(
+ const char * action,
+ const char * target,
+ const char * command,
+ const char * out_data,
+ const char * err_data,
+ int exit_reason
+ );
+
+char * outf_int( int value );
+char * outf_double( double value );
+char * outf_time( time_t value );
+
+#endif
diff --git a/jam-files/engine/parse.c b/jam-files/engine/parse.c
new file mode 100644
index 000000000..9114fa057
--- /dev/null
+++ b/jam-files/engine/parse.c
@@ -0,0 +1,132 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "newstr.h"
+#include "modules.h"
+#include "frames.h"
+
+/*
+ * parse.c - make and destroy parse trees as driven by the parser
+ *
+ * 09/07/00 (seiwald) - ref count on PARSE to avoid freeing when used,
+ * as per Matt Armstrong.
+ * 09/11/00 (seiwald) - structure reworked to reflect that (*func)()
+ * returns a LIST *.
+ */
+
+static PARSE * yypsave;
+
+void parse_file( char * f, FRAME * frame )
+{
+ /* Suspend scan of current file and push this new file in the stream. */
+ yyfparse( f );
+
+ /* Now parse each block of rules and execute it. Execute it outside of the
+ * parser so that recursive calls to yyrun() work (no recursive yyparse's).
+ */
+
+ for ( ; ; )
+ {
+ PARSE * p;
+
+ /* Filled by yyparse() calling parse_save(). */
+ yypsave = 0;
+
+ /* If parse error or empty parse, outta here. */
+ if ( yyparse() || !( p = yypsave ) )
+ break;
+
+ /* Run the parse tree. */
+ parse_evaluate( p, frame );
+ parse_free( p );
+ }
+}
+
+
+void parse_save( PARSE * p )
+{
+ yypsave = p;
+}
+
+
+PARSE * parse_make(
+ LIST * (* func)( PARSE *, FRAME * ),
+ PARSE * left,
+ PARSE * right,
+ PARSE * third,
+ char * string,
+ char * string1,
+ int num )
+{
+ PARSE * p = (PARSE *)BJAM_MALLOC( sizeof( PARSE ) );
+
+ p->func = func;
+ p->left = left;
+ p->right = right;
+ p->third = third;
+ p->string = string;
+ p->string1 = string1;
+ p->num = num;
+ p->refs = 1;
+ p->rulename = 0;
+
+ if ( left )
+ {
+ p->file = left->file;
+ p->line = left->line;
+ }
+ else
+ {
+ yyinput_stream( &p->file, &p->line );
+ }
+
+ return p;
+}
+
+
+void parse_refer( PARSE * p )
+{
+ ++p->refs;
+}
+
+
+void parse_free( PARSE * p )
+{
+ if ( --p->refs )
+ return;
+
+ if ( p->string )
+ freestr( p->string );
+ if ( p->string1 )
+ freestr( p->string1 );
+ if ( p->left )
+ parse_free( p->left );
+ if ( p->right )
+ parse_free( p->right );
+ if ( p->third )
+ parse_free( p->third );
+ if ( p->rulename )
+ freestr( p->rulename );
+
+ BJAM_FREE( (char *)p );
+}
+
+
+LIST * parse_evaluate( PARSE * p, FRAME * frame )
+{
+ frame->procedure = p;
+ return (*p->func)( p, frame );
+}
diff --git a/jam-files/engine/parse.h b/jam-files/engine/parse.h
new file mode 100644
index 000000000..e324972f7
--- /dev/null
+++ b/jam-files/engine/parse.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef PARSE_DWA20011020_H
+#define PARSE_DWA20011020_H
+
+#include "frames.h"
+#include "modules.h"
+#include "lists.h"
+
+/*
+ * parse.h - make and destroy parse trees as driven by the parser.
+ */
+
+/*
+ * Parse tree node.
+ */
+
+struct _PARSE {
+ LIST * (* func)( PARSE *, FRAME * );
+ PARSE * left;
+ PARSE * right;
+ PARSE * third;
+ char * string;
+ char * string1;
+ int num;
+ int refs;
+/* module * module; */
+ char * rulename;
+ char * file;
+ int line;
+};
+
+void parse_file( char *, FRAME * );
+void parse_save( PARSE * );
+
+PARSE * parse_make(
+ LIST * (* func)( PARSE *, FRAME * ),
+ PARSE * left,
+ PARSE * right,
+ PARSE * third,
+ char * string,
+ char * string1,
+ int num );
+
+void parse_refer ( PARSE * );
+void parse_free ( PARSE * );
+LIST * parse_evaluate( PARSE *, FRAME * );
+
+#endif
diff --git a/jam-files/engine/patchlevel.h b/jam-files/engine/patchlevel.h
new file mode 100644
index 000000000..699efd84b
--- /dev/null
+++ b/jam-files/engine/patchlevel.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* Keep JAMVERSYM in sync with VERSION. */
+/* It can be accessed as $(JAMVERSION) in the Jamfile. */
+
+#define VERSION_MAJOR 2011
+#define VERSION_MINOR 04
+#define VERSION_PATCH 0
+#define VERSION_MAJOR_SYM "2011"
+#define VERSION_MINOR_SYM "04"
+#define VERSION_PATCH_SYM "00"
+#define VERSION "2011.4"
+#define JAMVERSYM "JAMVERSION=2011.4"
diff --git a/jam-files/engine/pathmac.c b/jam-files/engine/pathmac.c
new file mode 100644
index 000000000..e2c250e33
--- /dev/null
+++ b/jam-files/engine/pathmac.c
@@ -0,0 +1,252 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "pathsys.h"
+
+# ifdef OS_MAC
+
+# define DELIM ':'
+
+/*
+ * pathunix.c - manipulate file names on UNIX, NT, OS2
+ *
+ * External routines:
+ *
+ * path_parse() - split a file name into dir/base/suffix/member
+ * path_build() - build a filename given dir/base/suffix/member
+ * path_parent() - make a PATHNAME point to its parent dir
+ *
+ * path_parse() and path_build() just manipulate a string and a structure;
+ * they do not make system calls.
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 12/26/93 (seiwald) - handle dir/.suffix properly in path_build()
+ * 12/19/94 (mikem) - solaris string table insanity support
+ * 12/21/94 (wingerd) Use backslashes for pathnames - the NT way.
+ * 02/14/95 (seiwald) - parse and build /xxx properly
+ * 02/23/95 (wingerd) Compilers on NT can handle "/" in pathnames, so we
+ * should expect hdr searches to come up with strings
+ * like "thing/thing.h". So we need to test for "/" as
+ * well as "\" when parsing pathnames.
+ * 03/16/95 (seiwald) - fixed accursed typo on line 69.
+ * 05/03/96 (seiwald) - split from filent.c, fileunix.c
+ * 12/20/96 (seiwald) - when looking for the rightmost . in a file name,
+ * don't include the archive member name.
+ * 01/10/01 (seiwald) - path_parse now strips the trailing : from the
+ * directory name, unless the directory name is all
+ * :'s, so that $(d:P) works.
+ */
+
+/*
+ * path_parse() - split a file name into dir/base/suffix/member
+ */
+
+void
+path_parse(
+ char *file,
+ PATHNAME *f )
+{
+ char *p, *q;
+ char *end;
+
+ memset( (char *)f, 0, sizeof( *f ) );
+
+ /* Look for <grist> */
+
+ if ( file[0] == '<' && ( p = strchr( file, '>' ) ) )
+ {
+ f->f_grist.ptr = file;
+ f->f_grist.len = p - file;
+ file = p + 1;
+ }
+
+ /* Look for dir: */
+
+ if ( p = strrchr( file, DELIM ) )
+ {
+ f->f_dir.ptr = file;
+ f->f_dir.len = p - file;
+ file = p + 1;
+
+ /* All :'s? Include last : as part of directory name */
+
+ while ( ( p > f->f_dir.ptr ) && ( *--p == DELIM ) );
+
+ if ( p == f->f_dir.ptr )
+ ++f->f_dir.len;
+ }
+
+ end = file + strlen( file );
+
+ /* Look for (member). */
+
+ if ( ( p = strchr( file, '(' ) ) && ( end[-1] == ')' ) )
+ {
+ f->f_member.ptr = p + 1;
+ f->f_member.len = end - p - 2;
+ end = p;
+ }
+
+ /* Look for .suffix */
+ /* This would be memrchr() */
+
+ p = 0;
+ q = file;
+
+ while ( q = memchr( q, '.', end - q ) )
+ p = q++;
+
+ if ( p )
+ {
+ f->f_suffix.ptr = p;
+ f->f_suffix.len = end - p;
+ end = p;
+ }
+
+ /* Leaves base */
+
+ f->f_base.ptr = file;
+ f->f_base.len = end - file;
+}
+
+/*
+ * path_build() - build a filename given dir/base/suffix/member.
+ */
+
+# define DIR_EMPTY 0 /* "" */
+# define DIR_DOT 1 /* : */
+# define DIR_DOTDOT 2 /* :: */
+# define DIR_ABS 3 /* dira:dirb: */
+# define DIR_REL 4 /* :dira:dirb: */
+
+# define G_DIR 0 /* take dir */
+# define G_ROOT 1 /* take root */
+# define G_CAT 2 /* prepend root to dir */
+# define G_DTDR 3 /* :: of rel dir */
+# define G_DDDD 4 /* make it ::: (../..) */
+# define G_MT 5 /* leave it empty */
+
+char grid[5][5] = {
+/* EMPTY DOT DOTDOT ABS REL */
+/* EMPTY */ { G_MT, G_DIR, G_DIR, G_DIR, G_DIR },
+/* DOT */ { G_ROOT, G_DIR, G_DIR, G_DIR, G_DIR },
+/* DOTDOT */ { G_ROOT, G_ROOT, G_DDDD, G_DIR, G_DTDR },
+/* ABS */ { G_ROOT, G_ROOT, G_ROOT, G_DIR, G_CAT },
+/* REL */ { G_ROOT, G_ROOT, G_ROOT, G_DIR, G_CAT }
+};
+
+static int file_flags( char * ptr, int len )
+{
+ if ( !len )
+ return DIR_EMPTY;
+ if ( ( len == 1 ) && ( ptr[0] == DELIM ) )
+ return DIR_DOT;
+ if ( ( len == 2 ) && ( ptr[0] == DELIM ) && ( ptr[1] == DELIM ) )
+ return DIR_DOTDOT;
+ if ( ptr[0] == DELIM )
+ return DIR_REL;
+ return DIR_ABS;
+}
+
+
+void path_build( PATHNAME * f, string * file, int binding )
+{
+ int dflag;
+ int rflag;
+ int act;
+
+ file_build1( f, file );
+
+ /* Combine root & directory, according to the grid. */
+
+ dflag = file_flags( f->f_dir.ptr, f->f_dir.len );
+ rflag = file_flags( f->f_root.ptr, f->f_root.len );
+
+ switch ( act = grid[ rflag ][ dflag ] )
+ {
+ case G_DTDR:
+ {
+ /* :: of rel dir */
+ string_push_back( file, DELIM );
+ }
+ /* fall through */
+
+ case G_DIR:
+ /* take dir */
+ string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
+ break;
+
+ case G_ROOT:
+ /* take root */
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
+ break;
+
+ case G_CAT:
+ /* prepend root to dir */
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
+ if ( file->value[ file->size - 1 ] == DELIM )
+ string_pop_back( file );
+ string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
+ break;
+
+ case G_DDDD:
+ /* make it ::: (../..) */
+ string_append( file, ":::" );
+ break;
+ }
+
+ /* Put : between dir and file (if none already). */
+
+ if ( ( act != G_MT ) &&
+ ( file->value[ file->size - 1 ] != DELIM ) &&
+ ( f->f_base.len || f->f_suffix.len ) )
+ {
+ string_push_back( file, DELIM );
+ }
+
+ if ( f->f_base.len )
+ string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
+
+ if ( f->f_suffix.len )
+ string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
+
+ if ( f->f_member.len )
+ {
+ string_push_back( file, '(' );
+ string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
+ string_push_back( file, ')' );
+ }
+
+ if ( DEBUG_SEARCH )
+ printf( " -> '%s'\n", file->value );
+}
+
+
+/*
+ * path_parent() - make a PATHNAME point to its parent dir
+ */
+
+void path_parent( PATHNAME * f )
+{
+ /* Just set everything else to nothing. */
+
+ f->f_base.ptr =
+ f->f_suffix.ptr =
+ f->f_member.ptr = "";
+
+ f->f_base.len =
+ f->f_suffix.len =
+ f->f_member.len = 0;
+}
+
+# endif /* OS_MAC */
diff --git a/jam-files/engine/pathsys.h b/jam-files/engine/pathsys.h
new file mode 100644
index 000000000..737758105
--- /dev/null
+++ b/jam-files/engine/pathsys.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * pathsys.h - PATHNAME struct
+ */
+
+/*
+ * PATHNAME - a name of a file, broken into <grist>dir/base/suffix(member)
+ *
+ * <grist> is salt to distinguish between targets that otherwise would
+ * have the same name: it never appears in the bound name of a target.
+ * (member) is an archive member name: the syntax is arbitrary, but must
+ * agree in path_parse(), path_build() and the Jambase.
+ *
+ * On VMS, we keep track of whether the original path was a directory
+ * (without a file), so that $(VAR:D) can climb to the parent.
+ */
+
+#ifndef PATHSYS_VP_20020211_H
+# define PATHSYS_VP_20020211_H
+
+#include "strings.h"
+
+typedef struct _pathname PATHNAME;
+typedef struct _pathpart PATHPART;
+
+struct _pathpart
+{
+ char * ptr;
+ int len;
+};
+
+struct _pathname
+{
+ PATHPART part[6];
+#ifdef OS_VMS
+ int parent;
+#endif
+
+#define f_grist part[0]
+#define f_root part[1]
+#define f_dir part[2]
+#define f_base part[3]
+#define f_suffix part[4]
+#define f_member part[5]
+};
+
+void path_build( PATHNAME * f, string * file, int binding );
+void path_build1( PATHNAME * f, string * file );
+
+void path_parse( char * file, PATHNAME * f );
+void path_parent( PATHNAME * f );
+
+#ifdef NT
+
+/** Returns a newstr-allocated string with the long equivalent of 'short_path'.
+ If none exists -- i.e. 'short_path' is already a long path -- it is returned
+ unaltered. */
+char * short_path_to_long_path( char * short_path );
+
+#endif
+
+#ifdef USE_PATHUNIX
+/** Returns a static pointer to the system dependent path to the temporary
+ directory. NOTE: *without* a trailing path separator.
+*/
+const char * path_tmpdir( void );
+
+/** Returns a new temporary name.
+*/
+const char * path_tmpnam( void );
+
+/** Returns a new temporary path.
+*/
+const char * path_tmpfile( void );
+#endif
+
+/** Given the first argument to 'main', returns a full path to
+ our executable. Returns null in the unlikely case it
+ cannot be determined. Caller is responsible for freeing
+ the string.
+
+ Implemented in jam.c
+*/
+char * executable_path (char *argv0);
+
+#endif
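
To make the <grist>dir/base/suffix(member) breakdown above concrete, here is a rough round-trip (not part of the patch) through path_parse() and path_build(). The demo file name is arbitrary, and the sketch assumes it is built and linked the same way as the engine sources in this diff (pathunix.c, strings.c, ...).

    #include <stdio.h>
    #include "jam.h"
    #include "pathsys.h"

    int main()
    {
        PATHNAME f;
        string out[ 1 ];

        /* Split "src/main.cpp" into its dir/base/suffix parts. */
        path_parse( "src/main.cpp", &f );
        printf( "dir='%.*s' base='%.*s' suffix='%.*s'\n",
                f.f_dir.len, f.f_dir.ptr,
                f.f_base.len, f.f_base.ptr,
                f.f_suffix.len, f.f_suffix.ptr );

        /* Reassemble the parts; with no root or member set this yields the
           original name again. */
        string_new( out );
        path_build( &f, out, 0 );
        printf( "rebuilt: %s\n", out->value );
        string_free( out );

        return 0;
    }
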
diff --git a/jam-files/engine/pathunix.c b/jam-files/engine/pathunix.c
new file mode 100644
index 000000000..2daad14b7
--- /dev/null
+++ b/jam-files/engine/pathunix.c
@@ -0,0 +1,457 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "pathsys.h"
+# include "strings.h"
+# include "newstr.h"
+# include "filesys.h"
+# include <time.h>
+# include <stdlib.h>
+# ifndef OS_NT
+# include <unistd.h>
+# endif
+
+# ifdef USE_PATHUNIX
+
+/*
+ * pathunix.c - manipulate file names on UNIX, NT, OS2, AmigaOS
+ *
+ * External routines:
+ *
+ * path_parse() - split a file name into dir/base/suffix/member
+ * path_build() - build a filename given dir/base/suffix/member
+ * path_parent() - make a PATHNAME point to its parent dir
+ *
+ * path_parse() and path_build() just manipulate a string and a structure;
+ * they do not make system calls.
+ *
+ * 04/08/94 (seiwald) - Coherent/386 support added.
+ * 12/26/93 (seiwald) - handle dir/.suffix properly in path_build()
+ * 12/19/94 (mikem) - solaris string table insanity support
+ * 12/21/94 (wingerd) Use backslashes for pathnames - the NT way.
+ * 02/14/95 (seiwald) - parse and build /xxx properly
+ * 02/23/95 (wingerd) Compilers on NT can handle "/" in pathnames, so we
+ * should expect hdr searches to come up with strings
+ * like "thing/thing.h". So we need to test for "/" as
+ * well as "\" when parsing pathnames.
+ * 03/16/95 (seiwald) - fixed accursed typo on line 69.
+ * 05/03/96 (seiwald) - split from filent.c, fileunix.c
+ * 12/20/96 (seiwald) - when looking for the rightmost . in a file name,
+ * don't include the archive member name.
+ * 01/13/01 (seiwald) - turn on \ handling on UNIX, on by accident
+ */
+
+/*
+ * path_parse() - split a file name into dir/base/suffix/member
+ */
+
+void path_parse( char * file, PATHNAME * f )
+{
+ char * p;
+ char * q;
+ char * end;
+
+ memset( (char *)f, 0, sizeof( *f ) );
+
+ /* Look for <grist> */
+
+ if ( ( file[0] == '<' ) && ( p = strchr( file, '>' ) ) )
+ {
+ f->f_grist.ptr = file;
+ f->f_grist.len = p - file;
+ file = p + 1;
+ }
+
+ /* Look for dir/ */
+
+ p = strrchr( file, '/' );
+
+# if PATH_DELIM == '\\'
+ /* On NT, look for dir\ as well */
+ {
+ char *p1 = strrchr( file, '\\' );
+ p = p1 > p ? p1 : p;
+ }
+# endif
+
+ if ( p )
+ {
+ f->f_dir.ptr = file;
+ f->f_dir.len = p - file;
+
+ /* Special case for / - dirname is /, not "" */
+
+ if ( !f->f_dir.len )
+ f->f_dir.len = 1;
+
+# if PATH_DELIM == '\\'
+ /* Special case for D:/ - dirname is D:/, not "D:" */
+
+ if ( f->f_dir.len == 2 && file[1] == ':' )
+ f->f_dir.len = 3;
+# endif
+
+ file = p + 1;
+ }
+
+ end = file + strlen( file );
+
+ /* Look for (member) */
+
+ if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
+ {
+ f->f_member.ptr = p + 1;
+ f->f_member.len = end - p - 2;
+ end = p;
+ }
+
+ /* Look for .suffix */
+ /* This would be memrchr() */
+
+ p = 0;
+ q = file;
+
+ while ( ( q = (char *)memchr( q, '.', end - q ) ) )
+ p = q++;
+
+ if ( p )
+ {
+ f->f_suffix.ptr = p;
+ f->f_suffix.len = end - p;
+ end = p;
+ }
+
+ /* Leaves base */
+
+ f->f_base.ptr = file;
+ f->f_base.len = end - file;
+}
+
+/*
+ * path_delims - the string of legal path delimiters
+ */
+static char path_delims[] = {
+ PATH_DELIM,
+# if PATH_DELIM == '\\'
+ '/',
+# endif
+ 0
+};
+
+/*
+ * is_path_delim() - true iff c is a path delimiter
+ */
+static int is_path_delim( char c )
+{
+ char* p = strchr( path_delims, c );
+ return p && *p;
+}
+
+/*
+ * as_path_delim() - convert c to a path delimiter if it isn't one
+ * already
+ */
+static char as_path_delim( char c )
+{
+ return is_path_delim( c ) ? c : PATH_DELIM;
+}
+
+/*
+ * path_build() - build a filename given dir/base/suffix/member
+ *
+ * To avoid changing slash direction on NT when reconstituting paths,
+ * instead of unconditionally appending PATH_DELIM we check the
+ * past-the-end character of the previous path element. If it is in
+ * path_delims, we append that, and only append PATH_DELIM as a last
+ * resort. This heuristic is based on the fact that PATHNAME objects
+ * are usually the result of calling path_parse, which leaves the
+ * original slashes in the past-the-end position. Correctness depends
+ * on the assumption that all strings are zero terminated, so a
+ * past-the-end character will always be available.
+ *
+ * As an attendant patch, we had to ensure that backslashes are used
+ * explicitly in timestamp.c
+ */
+
+void
+path_build(
+ PATHNAME *f,
+ string *file,
+ int binding )
+{
+ file_build1( f, file );
+
+ /* Don't prepend root if it's . or directory is rooted */
+# if PATH_DELIM == '/'
+
+ if ( f->f_root.len
+ && !( f->f_root.len == 1 && f->f_root.ptr[0] == '.' )
+ && !( f->f_dir.len && f->f_dir.ptr[0] == '/' ) )
+
+# else /* unix */
+
+ if ( f->f_root.len
+ && !( f->f_root.len == 1 && f->f_root.ptr[0] == '.' )
+ && !( f->f_dir.len && f->f_dir.ptr[0] == '/' )
+ && !( f->f_dir.len && f->f_dir.ptr[0] == '\\' )
+ && !( f->f_dir.len && f->f_dir.ptr[1] == ':' ) )
+
+# endif /* unix */
+
+ {
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
+ /* If 'root' already ends with a path delimiter,
+ don't add another one. */
+ if ( ! is_path_delim( f->f_root.ptr[f->f_root.len-1] ) )
+ string_push_back( file, as_path_delim( f->f_root.ptr[f->f_root.len] ) );
+ }
+
+ if ( f->f_dir.len )
+ string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
+
+ /* UNIX: Put / between dir and file */
+ /* NT: Put \ between dir and file */
+
+ if ( f->f_dir.len && ( f->f_base.len || f->f_suffix.len ) )
+ {
+ /* UNIX: Special case for dir \ : don't add another \ */
+ /* NT: Special case for dir / : don't add another / */
+
+# if PATH_DELIM == '\\'
+ if ( !( f->f_dir.len == 3 && f->f_dir.ptr[1] == ':' ) )
+# endif
+ if ( !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[0] ) ) )
+ string_push_back( file, as_path_delim( f->f_dir.ptr[f->f_dir.len] ) );
+ }
+
+ if ( f->f_base.len )
+ {
+ string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
+ }
+
+ if ( f->f_suffix.len )
+ {
+ string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
+ }
+
+ if ( f->f_member.len )
+ {
+ string_push_back( file, '(' );
+ string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
+ string_push_back( file, ')' );
+ }
+}
+
+/*
+ * path_parent() - make a PATHNAME point to its parent dir
+ */
+
+void
+path_parent( PATHNAME *f )
+{
+ /* just set everything else to nothing */
+
+ f->f_base.ptr =
+ f->f_suffix.ptr =
+ f->f_member.ptr = "";
+
+ f->f_base.len =
+ f->f_suffix.len =
+ f->f_member.len = 0;
+}
+
+#ifdef NT
+#include <windows.h>
+#include <tchar.h>
+
+/* The definition of this in winnt.h is not ANSI-C compatible. */
+#undef INVALID_FILE_ATTRIBUTES
+#define INVALID_FILE_ATTRIBUTES ((DWORD)-1)
+
+
+DWORD ShortPathToLongPath(LPCTSTR lpszShortPath,LPTSTR lpszLongPath,DWORD
+ cchBuffer)
+{
+ LONG i=0;
+ TCHAR path[_MAX_PATH]={0};
+ TCHAR ret[_MAX_PATH]={0};
+ LONG pos=0, prev_pos=0;
+ LONG len=_tcslen(lpszShortPath);
+
+ /* Is the string valid? */
+ if (!lpszShortPath) {
+ SetLastError(ERROR_INVALID_PARAMETER);
+ return 0;
+ }
+
+ /* Is the path valid? */
+ if (GetFileAttributes(lpszShortPath)==INVALID_FILE_ATTRIBUTES)
+ return 0;
+
+ /* Convert "/" to "\" */
+ for (i=0;i<len;++i) {
+ if (lpszShortPath[i]==_T('/'))
+ path[i]=_T('\\');
+ else
+ path[i]=lpszShortPath[i];
+ }
+
+ /* UNC path? */
+ if (path[0]==_T('\\') && path[1]==_T('\\')) {
+ pos=2;
+ for (i=0;i<2;++i) {
+ while (path[pos]!=_T('\\') && path[pos]!=_T('\0'))
+ ++pos;
+ ++pos;
+ }
+ _tcsncpy(ret,path,pos-1);
+ } /* Drive letter? */
+ else if (path[1]==_T(':')) {
+ if (path[2]==_T('\\'))
+ pos=3;
+ if (len==3) {
+ if (cchBuffer>3)
+ _tcscpy(lpszLongPath,lpszShortPath);
+ return len;
+ }
+ _tcsncpy(ret,path,2);
+ }
+
+ /* Expand the path for each subpath, and strip trailing backslashes */
+ for (prev_pos = pos-1;pos<=len;++pos) {
+ if (path[pos]==_T('\\') || (path[pos]==_T('\0') &&
+ path[pos-1]!=_T('\\'))) {
+ WIN32_FIND_DATA fd;
+ HANDLE hf=0;
+ TCHAR c=path[pos];
+ char* new_element;
+ path[pos]=_T('\0');
+
+ /* the path[prev_pos+1]... path[pos] range is the part of
+ path we're handling right now. We need to find long
+ name for that element and add it. */
+ new_element = path + prev_pos + 1;
+
+ /* First add separator, but only if there's something in result already. */
+ if (ret[0] != _T('\0'))
+ {
+ _tcscat(ret,_T("\\"));
+ }
+
+ /* If it's ".." element, we need to append it, not
+ the name in parent that FindFirstFile will return.
+ Same goes for "." */
+
+ if (new_element[0] == _T('.') && new_element[1] == _T('\0') ||
+ new_element[0] == _T('.') && new_element[1] == _T('.')
+ && new_element[2] == _T('\0'))
+ {
+ _tcscat(ret, new_element);
+ }
+ else
+ {
+ hf=FindFirstFile(path, &fd);
+ if (hf==INVALID_HANDLE_VALUE)
+ return 0;
+
+ _tcscat(ret,fd.cFileName);
+ FindClose(hf);
+ }
+
+ path[pos]=c;
+
+ prev_pos = pos;
+ }
+ }
+
+ len=_tcslen(ret)+1;
+ if (cchBuffer>=len)
+ _tcscpy(lpszLongPath,ret);
+
+ return len;
+}
+
+char* short_path_to_long_path(char* short_path)
+{
+ char buffer2[_MAX_PATH];
+ int ret = ShortPathToLongPath(short_path, buffer2, _MAX_PATH);
+
+ if (ret)
+ return newstr(buffer2);
+ else
+ return newstr(short_path);
+}
+
+#endif
+
+static string path_tmpdir_buffer[1];
+static const char * path_tmpdir_result = 0;
+
+const char * path_tmpdir()
+{
+ if (!path_tmpdir_result)
+ {
+ # ifdef OS_NT
+ DWORD pathLength = 0;
+ pathLength = GetTempPath(pathLength,NULL);
+ string_new(path_tmpdir_buffer);
+ string_reserve(path_tmpdir_buffer,pathLength);
+ pathLength = GetTempPathA(pathLength,path_tmpdir_buffer[0].value);
+ path_tmpdir_buffer[0].value[pathLength-1] = '\0';
+ path_tmpdir_buffer[0].size = pathLength-1;
+ # else
+ const char * t = getenv("TMPDIR");
+ if (!t)
+ {
+ t = "/tmp";
+ }
+ string_new(path_tmpdir_buffer);
+ string_append(path_tmpdir_buffer,t);
+ # endif
+ path_tmpdir_result = path_tmpdir_buffer[0].value;
+ }
+ return path_tmpdir_result;
+}
+
+const char * path_tmpnam(void)
+{
+ char name_buffer[64];
+ # ifdef OS_NT
+ unsigned long c0 = GetCurrentProcessId();
+ # else
+ unsigned long c0 = getpid();
+ # endif
+ static unsigned long c1 = 0;
+ if (0 == c1) c1 = time(0)&0xffff;
+ c1 += 1;
+ sprintf(name_buffer,"jam%lx%lx.000",c0,c1);
+ return newstr(name_buffer);
+}
+
+const char * path_tmpfile(void)
+{
+ const char * result = 0;
+
+ string file_path;
+ string_copy(&file_path,path_tmpdir());
+ string_push_back(&file_path,PATH_DELIM);
+ string_append(&file_path,path_tmpnam());
+ result = newstr(file_path.value);
+ string_free(&file_path);
+
+ return result;
+}
+
+
+# endif /* unix, NT, OS/2, AmigaOS */
diff --git a/jam-files/engine/pathvms.c b/jam-files/engine/pathvms.c
new file mode 100644
index 000000000..975fe5a57
--- /dev/null
+++ b/jam-files/engine/pathvms.c
@@ -0,0 +1,406 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+# include "pathsys.h"
+
+# ifdef OS_VMS
+
+# define DEBUG
+
+/*
+ * pathvms.c - manipulate file names on VMS
+ *
+ * External routines:
+ *
+ * path_parse() - split a file name into dir/base/suffix/member
+ * path_build() - build a filename given dir/base/suffix/member
+ * path_parent() - make a PATHNAME point to its parent dir
+ *
+ * path_parse() and path_build() just manipulate a string and a structure;
+ * they do not make system calls.
+ *
+ * WARNING! This file contains voodoo logic, as black magic is
+ * necessary for wrangling with VMS file names. Woe be to people
+ * who mess with this code.
+ *
+ * 02/09/95 (seiwald) - bungled R=[xxx] - was using directory length!
+ * 05/03/96 (seiwald) - split from filevms.c
+ */
+
+/*
+ * path_parse() - split a file name into dir/base/suffix/member.
+ */
+
+void path_parse( char * file, PATHNAME * f )
+{
+ char * p;
+ char * q;
+ char * end;
+
+ memset( (char *)f, 0, sizeof( *f ) );
+
+ /* Look for <grist> */
+
+ if ( ( file[0] == '<' ) && ( p = strchr( file, '>' ) ) )
+ {
+ f->f_grist.ptr = file;
+ f->f_grist.len = p - file;
+ file = p + 1;
+ }
+
+ /* Look for dev:[dir] or dev: */
+
+ if ( ( p = strchr( file, ']' ) ) || ( p = strchr( file, ':' ) ) )
+ {
+ f->f_dir.ptr = file;
+ f->f_dir.len = p + 1 - file;
+ file = p + 1;
+ }
+
+ end = file + strlen( file );
+
+ /* Look for (member). */
+
+ if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
+ {
+ f->f_member.ptr = p + 1;
+ f->f_member.len = end - p - 2;
+ end = p;
+ }
+
+ /* Look for .suffix */
+ /* This would be memrchr(). */
+
+ p = 0;
+ q = file;
+
+ while ( q = (char *)memchr( q, '.', end - q ) )
+ p = q++;
+
+ if ( p )
+ {
+ f->f_suffix.ptr = p;
+ f->f_suffix.len = end - p;
+ end = p;
+ }
+
+ /* Leaves base. */
+ f->f_base.ptr = file;
+ f->f_base.len = end - file;
+
+ /* Is this a directory without a file spec? */
+ f->parent = 0;
+}
+
+/*
+ * dir mods result
+ * --- --- ------
+ * Rerooting:
+ *
+ * (none) :R=dev: dev:
+ * devd: :R=dev: devd:
+ * devd:[dir] :R=dev: devd:[dir]
+ * [.dir] :R=dev: dev:[dir] questionable
+ * [dir] :R=dev: dev:[dir]
+ *
+ * (none) :R=[rdir] [rdir] questionable
+ * devd: :R=[rdir] devd:
+ * devd:[dir] :R=[rdir] devd:[dir]
+ * [.dir] :R=[rdir] [rdir.dir] questionable
+ * [dir] :R=[rdir] [rdir]
+ *
+ * (none) :R=dev:[root] dev:[root]
+ * devd: :R=dev:[root] devd:
+ * devd:[dir] :R=dev:[root] devd:[dir]
+ * [.dir] :R=dev:[root] dev:[root.dir]
+ * [dir] :R=dev:[root] [dir]
+ *
+ * Climbing to parent:
+ *
+ */
+
+# define DIR_EMPTY 0 /* empty string */
+# define DIR_DEV 1 /* dev: */
+# define DIR_DEVDIR 2 /* dev:[dir] */
+# define DIR_DOTDIR 3 /* [.dir] */
+# define DIR_DASHDIR 4 /* [-] or [-.dir] */
+# define DIR_ABSDIR 5 /* [dir] */
+# define DIR_ROOT 6 /* [000000] or dev:[000000] */
+
+# define G_DIR 0 /* take just dir */
+# define G_ROOT 1 /* take just root */
+# define G_VAD 2 /* root's dev: + [abs] */
+# define G_DRD 3 /* root's dev:[dir] + [.rel] */
+# define G_VRD 4 /* root's dev: + [.rel] made [abs] */
+# define G_DDD 5 /* root's dev:[dir] + . + [dir] */
+
+static int grid[7][7] = {
+
+/* root/dir EMPTY DEV DEVDIR DOTDIR DASH, ABSDIR ROOT */
+/* EMPTY */ G_DIR, G_DIR, G_DIR, G_DIR, G_DIR, G_DIR, G_DIR,
+/* DEV */ G_ROOT, G_DIR, G_DIR, G_VRD, G_VAD, G_VAD, G_VAD,
+/* DEVDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_VAD, G_VAD, G_VAD,
+/* DOTDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_DIR, G_DIR, G_DIR,
+/* DASHDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_DDD, G_DIR, G_DIR,
+/* ABSDIR */ G_ROOT, G_DIR, G_DIR, G_DRD, G_DIR, G_DIR, G_DIR,
+/* ROOT */ G_ROOT, G_DIR, G_DIR, G_VRD, G_DIR, G_DIR, G_DIR,
+
+};
+
+struct dirinf
+{
+ int flags;
+
+ struct
+ {
+ char * ptr;
+ int len;
+ } dev, dir;
+};
+
+static char * strnchr( char * buf, int c, int len )
+{
+ while ( len-- )
+ if ( *buf && ( *buf++ == c ) )
+ return buf - 1;
+ return 0;
+}
+
+
+static void dir_flags( char * buf, int len, struct dirinf * i )
+{
+ char * p;
+
+ if ( !buf || !len )
+ {
+ i->flags = DIR_EMPTY;
+ i->dev.ptr =
+ i->dir.ptr = 0;
+ i->dev.len =
+ i->dir.len = 0;
+ }
+ else if ( p = strnchr( buf, ':', len ) )
+ {
+ i->dev.ptr = buf;
+ i->dev.len = p + 1 - buf;
+ i->dir.ptr = buf + i->dev.len;
+ i->dir.len = len - i->dev.len;
+ i->flags = i->dir.len && *i->dir.ptr == '[' ? DIR_DEVDIR : DIR_DEV;
+ }
+ else
+ {
+ i->dev.ptr = buf;
+ i->dev.len = 0;
+ i->dir.ptr = buf;
+ i->dir.len = len;
+
+ if ( ( *buf == '[' ) && ( buf[1] == ']' ) )
+ i->flags = DIR_EMPTY;
+ else if ( ( *buf == '[' ) && ( buf[1] == '.' ) )
+ i->flags = DIR_DOTDIR;
+ else if ( ( *buf == '[' ) && ( buf[1] == '-' ) )
+ i->flags = DIR_DASHDIR;
+ else
+ i->flags = DIR_ABSDIR;
+ }
+
+ /* But if it is rooted in any way, it is the root directory. */
+
+ if ( ( i->dir.len == 8 ) && !strncmp( i->dir.ptr, "[000000]", 8 ) )
+ i->flags = DIR_ROOT;
+}
+
+
+/*
+ * path_build() - build a filename given dir/base/suffix/member
+ */
+
+void path_build( PATHNAME * f, string * file, int binding )
+{
+ struct dirinf root;
+ struct dirinf dir;
+ int g;
+
+ file_build1( f, file );
+
+ /* Get info on root and dir for combining. */
+ dir_flags( f->f_root.ptr, f->f_root.len, &root );
+ dir_flags( f->f_dir.ptr, f->f_dir.len, &dir );
+
+ /* Combine. */
+ switch ( g = grid[ root.flags ][ dir.flags ] )
+ {
+ case G_DIR:
+ /* take dir */
+ string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
+ break;
+
+ case G_ROOT:
+ /* take root */
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
+ break;
+
+ case G_VAD:
+ /* root's dev + abs directory */
+ string_append_range( file, root.dev.ptr, root.dev.ptr + root.dev.len );
+ string_append_range( file, dir.dir.ptr, dir.dir.ptr + dir.dir.len );
+ break;
+
+ case G_DRD:
+ case G_DDD:
+ /* root's dev:[dir] + rel directory */
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
+
+ /* sanity checks: root ends with ] */
+
+ if ( file->value[file->size - 1] == ']' )
+ string_pop_back( file );
+
+ /* Add . if separating two -'s */
+
+ if ( g == G_DDD )
+ string_push_back( file, '.' );
+
+ /* skip [ of dir */
+ string_append_range( file, dir.dir.ptr + 1, dir.dir.ptr + 1 + dir.dir.len - 1 );
+ break;
+
+ case G_VRD:
+ /* root's dev + rel directory made abs */
+ string_append_range( file, root.dev.ptr, root.dev.ptr + root.dev.len );
+ string_push_back( file, '[' );
+ /* skip [. of rel dir */
+ string_append_range( file, dir.dir.ptr + 2, dir.dir.ptr + 2 + dir.dir.len - 2 );
+ break;
+ }
+
+# ifdef DEBUG
+ if ( DEBUG_SEARCH && ( root.flags || dir.flags ) )
+ printf( "%d x %d = %d (%s)\n", root.flags, dir.flags,
+ grid[ root.flags ][ dir.flags ], file->value );
+# endif
+
+ /*
+ * Now do the special :P modifier when no file was present.
+ * (none) (none)
+ * [dir1.dir2] [dir1]
+ * [dir] [000000]
+ * [.dir] (none)
+ * [] []
+ */
+
+ if ( ( file->value[ file->size - 1 ] == ']' ) && f->parent )
+ {
+ char * p = file->value + file->size;
+ while ( p-- > file->value )
+ {
+ if ( *p == '.' )
+ {
+ /* If we've truncated everything and are left with '[',
+ return an empty string. */
+ if ( p == file->value + 1 )
+ string_truncate( file, 0 );
+ else
+ {
+ string_truncate( file, p - file->value );
+ string_push_back( file, ']' );
+ }
+ break;
+ }
+
+ if ( *p == '-' )
+ {
+ /* handle .- or - */
+ if ( ( p > file->value ) && ( p[ -1 ] == '.' ) )
+ --p;
+
+ *p++ = ']';
+ break;
+ }
+
+ if ( *p == '[' )
+ {
+ if ( p[ 1 ] == ']' )
+ {
+ /* CONSIDER: I don't see any use of this code. We immediately
+ break, and 'p' is a local variable. */
+ p += 2;
+ }
+ else
+ {
+ string_truncate( file, p - file->value );
+ string_append( file, "[000000]" );
+ }
+ break;
+ }
+ }
+ }
+
+ /* Now copy the file pieces. */
+ if ( f->f_base.len )
+ {
+ string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
+ }
+
+ /* If there is no suffix, we append a "." onto all generated names. This
+ * keeps VMS from appending its own (wrong) idea of what the suffix should
+ * be.
+ */
+ if ( f->f_suffix.len )
+ string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
+ else if ( binding && f->f_base.len )
+ string_push_back( file, '.' );
+
+ if ( f->f_member.len )
+ {
+ string_push_back( file, '(' );
+ string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
+ string_push_back( file, ')' );
+ }
+
+# ifdef DEBUG
+ if ( DEBUG_SEARCH )
+ printf( "built %.*s + %.*s / %.*s suf %.*s mem %.*s -> %s\n",
+ f->f_root.len, f->f_root.ptr,
+ f->f_dir.len, f->f_dir.ptr,
+ f->f_base.len, f->f_base.ptr,
+ f->f_suffix.len, f->f_suffix.ptr,
+ f->f_member.len, f->f_member.ptr,
+ file->value );
+# endif
+}
+
+
+/*
+ * path_parent() - make a PATHNAME point to its parent dir
+ */
+
+void path_parent( PATHNAME * f )
+{
+ if ( f->f_base.len )
+ {
+ f->f_base.ptr =
+ f->f_suffix.ptr =
+ f->f_member.ptr = "";
+
+ f->f_base.len =
+ f->f_suffix.len =
+ f->f_member.len = 0;
+ }
+ else
+ {
+ f->parent = 1;
+ }
+}
+
+# endif /* VMS */
diff --git a/jam-files/engine/pwd.c b/jam-files/engine/pwd.c
new file mode 100644
index 000000000..90c8eb175
--- /dev/null
+++ b/jam-files/engine/pwd.c
@@ -0,0 +1,66 @@
+/* Copyright Vladimir Prus 2002, Rene Rivera 2005. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "jam.h"
+#include "lists.h"
+#include "newstr.h"
+#include "pathsys.h"
+#include "mem.h"
+
+#include <limits.h>
+#include <errno.h>
+
+/* MinGW on windows declares PATH_MAX in limits.h */
+#if defined(NT) && ! defined(__GNUC__)
+#include <direct.h>
+#define PATH_MAX _MAX_PATH
+#else
+#include <unistd.h>
+#if defined(__COMO__)
+ #include <linux/limits.h>
+#endif
+#endif
+
+#ifndef PATH_MAX
+ #define PATH_MAX 1024
+#endif
+
+/* The current directory can't change in bjam, so optimize this to cache
+** the result.
+*/
+static char * pwd_result = NULL;
+
+
+LIST*
+pwd(void)
+{
+ if (!pwd_result)
+ {
+ int buffer_size = PATH_MAX;
+ char * result_buffer = 0;
+ do
+ {
+ char * buffer = BJAM_MALLOC_RAW(buffer_size);
+ result_buffer = getcwd(buffer,buffer_size);
+ if (result_buffer)
+ {
+ #ifdef NT
+ pwd_result = short_path_to_long_path(result_buffer);
+ #else
+ pwd_result = newstr(result_buffer);
+ #endif
+ }
+ buffer_size *= 2;
+ BJAM_FREE_RAW(buffer);
+ }
+ while (!pwd_result && errno == ERANGE);
+
+ if (!pwd_result)
+ {
+ perror("can not get current directory");
+ return L0;
+ }
+ }
+ return list_new(L0, pwd_result);
+}
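
A short sketch (not part of the patch) of how callers use pwd(): it hands back a one-element LIST holding the current directory, computed once and cached as an interned string. The main() below is illustrative and assumes linking against pwd.c, lists.c and newstr.c from this diff.

    #include <stdio.h>
    #include "jam.h"
    #include "lists.h"
    #include "pwd.h"

    int main()
    {
        LIST * cwd = pwd();
        if ( cwd )
        {
            printf( "current directory: %s\n", cwd->string );
            /* Freeing the list does not free the cached path; that string
               is interned and lives until str_done(). */
            list_free( cwd );
        }
        return 0;
    }
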
diff --git a/jam-files/engine/pwd.h b/jam-files/engine/pwd.h
new file mode 100644
index 000000000..37cb531e4
--- /dev/null
+++ b/jam-files/engine/pwd.h
@@ -0,0 +1,10 @@
+/* Copyright Vladimir Prus 2002. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#ifndef PWD_H
+#define PWD_H
+
+LIST* pwd(void);
+
+#endif
diff --git a/jam-files/engine/regexp.c b/jam-files/engine/regexp.c
new file mode 100644
index 000000000..30197a2fe
--- /dev/null
+++ b/jam-files/engine/regexp.c
@@ -0,0 +1,1328 @@
+/*
+ * regcomp and regexec -- regsub and regerror are elsewhere
+ *
+ * Copyright (c) 1986 by University of Toronto.
+ * Written by Henry Spencer. Not derived from licensed software.
+ *
+ * Permission is granted to anyone to use this software for any
+ * purpose on any computer system, and to redistribute it freely,
+ * subject to the following restrictions:
+ *
+ * 1. The author is not responsible for the consequences of use of
+ * this software, no matter how awful, even if they arise
+ * from defects in it.
+ *
+ * 2. The origin of this software must not be misrepresented, either
+ * by explicit claim or by omission.
+ *
+ * 3. Altered versions must be plainly marked as such, and must not
+ * be misrepresented as being the original software.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 27 Dec 1986, to add \n as an alternative to |
+ *** to assist in implementing egrep.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 27 Dec 1986, to add \< and \> for word-matching
+ *** as in BSD grep and ex.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 28 Dec 1986, to optimize characters quoted with \.
+ *** THIS IS AN ALTERED VERSION. It was altered by James A. Woods,
+ *** ames!jaw, on 19 June 1987, to quash a regcomp() redundancy.
+ *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
+ *** seiwald@vix.com, on 28 August 1993, for use in jam. Regmagic.h
+ *** was moved into regexp.h, and the include of regexp.h now uses "'s
+ *** to avoid conflicting with the system regexp.h. Const, bless its
+ *** soul, was removed so it can compile everywhere. The declaration
+ *** of strchr() was in conflict on AIX, so it was removed (as it is
+ *** happily defined in string.h).
+ *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
+ *** seiwald@perforce.com, on 20 January 2000, to use function prototypes.
+ *
+ * Beware that some of this code is subtly aware of the way operator precedence
+ * is structured in regular expressions. Serious changes in regular-expression
+ * syntax might require a total rethink.
+ */
+
+
+#include "jam.h"
+#include "regexp.h"
+#include <stdio.h>
+#include <ctype.h>
+#ifndef ultrix
+ #include <stdlib.h>
+#endif
+#include <string.h>
+
+
+/*
+ * The "internal use only" fields in regexp.h are present to pass info from
+ * compile to execute that permits the execute phase to run lots faster on
+ * simple cases. They are:
+ *
+ * regstart char that must begin a match; '\0' if none obvious.
+ * reganch is the match anchored (at beginning-of-line only)?
+ * regmust string (pointer into program) that match must include, or NULL.
+ * regmlen length of regmust string.
+ *
+ * Regstart and reganch permit very fast decisions on suitable starting points
+ * for a match, cutting down the work a lot. Regmust permits fast rejection of
+ * lines that cannot possibly match. The regmust tests are costly enough that
+ * regcomp() supplies a regmust only if the r.e. contains something potentially
+ * expensive (at present, the only such thing detected is * or + at the start of
+ * the r.e., which can involve a lot of backup). Regmlen is supplied because the
+ * test in regexec() needs it and regcomp() is computing it anyway.
+ */
+
+/*
+ * Structure for regexp "program". This is essentially a linear encoding of a
+ * nondeterministic finite-state machine (aka syntax charts or "railroad normal
+ * form" in parsing technology). Each node is an opcode plus a "next" pointer,
+ * possibly plus an operand. "Next" pointers of all nodes except BRANCH
+ * implement concatenation; a "next" pointer with a BRANCH on both ends of it is
+ * connecting two alternatives. [Here we have one of the subtle syntax
+ * dependencies: an individual BRANCH, as opposed to a collection of them, is
+ * never concatenated with anything because of operator precedence.] The operand
+ * of some types of node is a literal string; for others, it is a node leading
+ * into a sub-FSM. In particular, the operand of a BRANCH node is the first node
+ * of the branch. [NB this is *not* a tree structure: the tail of the branch
+ * connects to the thing following the set of BRANCHes.] The opcodes are:
+ */
+
+/* definition number opnd? meaning */
+#define END 0 /* no End of program. */
+#define BOL 1 /* no Match "" at beginning of line. */
+#define EOL 2 /* no Match "" at end of line. */
+#define ANY 3 /* no Match any one character. */
+#define ANYOF 4 /* str Match any character in this string. */
+#define ANYBUT 5 /* str Match any character not in this string. */
+#define BRANCH 6 /* node Match this alternative, or the next... */
+#define BACK 7 /* no Match "", "next" ptr points backward. */
+#define EXACTLY 8 /* str Match this string. */
+#define NOTHING 9 /* no Match empty string. */
+#define STAR 10 /* node Match this (simple) thing 0 or more times. */
+#define PLUS 11 /* node Match this (simple) thing 1 or more times. */
+#define WORDA 12 /* no Match "" at wordchar, where prev is nonword */
+#define WORDZ 13 /* no Match "" at nonwordchar, where prev is word */
+#define OPEN 20 /* no Mark this point in input as start of #n. */
+ /* OPEN+1 is number 1, etc. */
+#define CLOSE 30 /* no Analogous to OPEN. */
+
+
+/*
+ * Opcode notes:
+ *
+ * BRANCH The set of branches constituting a single choice are hooked
+ * together with their "next" pointers, since precedence prevents
+ * anything being concatenated to any individual branch. The
+ * "next" pointer of the last BRANCH in a choice points to the
+ * thing following the whole choice. This is also where the
+ * final "next" pointer of each individual branch points; each
+ * branch starts with the operand node of a BRANCH node.
+ *
+ * BACK Normal "next" pointers all implicitly point forward; BACK
+ * exists to make loop structures possible.
+ *
+ * STAR,PLUS '?', and complex '*' and '+', are implemented as circular
+ * BRANCH structures using BACK. Simple cases (one character
+ * per match) are implemented with STAR and PLUS for speed
+ * and to minimize recursive plunges.
+ *
+ * OPEN,CLOSE ...are numbered at compile time.
+ */
+
+/*
+ * A node is one char of opcode followed by two chars of "next" pointer.
+ * "Next" pointers are stored as two 8-bit pieces, high order first. The
+ * value is a positive offset from the opcode of the node containing it.
+ * An operand, if any, simply follows the node. (Note that much of the
+ * code generation knows about this implicit relationship.)
+ *
+ * Using two bytes for the "next" pointer is vast overkill for most things,
+ * but allows patterns to get big without disasters.
+ */
+#define OP(p) (*(p))
+#define NEXT(p) (((*((p)+1)&0377)<<8) + (*((p)+2)&0377))
+#define OPERAND(p) ((p) + 3)
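
The OP/NEXT/OPERAND macros decode the 3-byte node header just described: one opcode byte, a 16-bit big-endian offset to the next node, and any operand starting at byte 3. A minimal sketch that hand-builds a single EXACTLY node and reads it back with the same macro definitions (the offset value 9 is made up for illustration; in a real program it points at the following node):

#include <stdio.h>

#define EXACTLY     8       /* same opcode value as in the table above */
#define OP(p)       (*(p))
#define NEXT(p)     (((*((p)+1)&0377)<<8) + (*((p)+2)&0377))
#define OPERAND(p)  ((p) + 3)

int main( void )
{
    /* opcode, high byte of "next", low byte of "next", then the operand */
    char node[] = { EXACTLY, 0, 9, 'a', 'b', '\0' };

    printf( "op      = %d\n", OP( node ) );       /* 8 (EXACTLY)     */
    printf( "next    = %d\n", NEXT( node ) );     /* 9 = (0<<8) + 9  */
    printf( "operand = %s\n", OPERAND( node ) );  /* "ab"            */
    return 0;
}
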
+
+/*
+ * See regmagic.h for one further detail of program structure.
+ */
+
+
+/*
+ * Utility definitions.
+ */
+#ifndef CHARBITS
+#define UCHARAT(p) ((int)*(unsigned char *)(p))
+#else
+#define UCHARAT(p) ((int)*(p)&CHARBITS)
+#endif
+
+#define FAIL(m) { regerror(m); return(NULL); }
+#define ISMULT(c) ((c) == '*' || (c) == '+' || (c) == '?')
+
+/*
+ * Flags to be passed up and down.
+ */
+#define HASWIDTH 01 /* Known never to match null string. */
+#define SIMPLE 02 /* Simple enough to be STAR/PLUS operand. */
+#define SPSTART 04 /* Starts with * or +. */
+#define WORST 0 /* Worst case. */
+
+/*
+ * Global work variables for regcomp().
+ */
+static char *regparse; /* Input-scan pointer. */
+static int regnpar; /* () count. */
+static char regdummy;
+static char *regcode; /* Code-emit pointer; &regdummy = don't. */
+static long regsize; /* Code size. */
+
+/*
+ * Forward declarations for regcomp()'s friends.
+ */
+#ifndef STATIC
+#define STATIC static
+#endif
+STATIC char *reg( int paren, int *flagp );
+STATIC char *regbranch( int *flagp );
+STATIC char *regpiece( int *flagp );
+STATIC char *regatom( int *flagp );
+STATIC char *regnode( int op );
+STATIC char *regnext( register char *p );
+STATIC void regc( int b );
+STATIC void reginsert( char op, char *opnd );
+STATIC void regtail( char *p, char *val );
+STATIC void regoptail( char *p, char *val );
+#ifdef STRCSPN
+STATIC int strcspn();
+#endif
+
+/*
+ - regcomp - compile a regular expression into internal code
+ *
+ * We can't allocate space until we know how big the compiled form will be,
+ * but we can't compile it (and thus know how big it is) until we've got a
+ * place to put the code. So we cheat: we compile it twice, once with code
+ * generation turned off and size counting turned on, and once "for real".
+ * This also means that we don't allocate space until we are sure that the
+ * thing really will compile successfully, and we never have to move the
+ * code and thus invalidate pointers into it. (Note that it has to be in
+ * one piece because free() must be able to free it all.)
+ *
+ * Beware that the optimization-preparation code in here knows about some
+ * of the structure of the compiled regexp.
+ */
+regexp *
+regcomp( char *exp )
+{
+ register regexp *r;
+ register char *scan;
+ register char *longest;
+ register unsigned len;
+ int flags;
+
+ if (exp == NULL)
+ FAIL("NULL argument");
+
+ /* First pass: determine size, legality. */
+#ifdef notdef
+ if (exp[0] == '.' && exp[1] == '*') exp += 2; /* aid grep */
+#endif
+ regparse = (char *)exp;
+ regnpar = 1;
+ regsize = 0L;
+ regcode = &regdummy;
+ regc(MAGIC);
+ if (reg(0, &flags) == NULL)
+ return(NULL);
+
+ /* Small enough for pointer-storage convention? */
+ if (regsize >= 32767L) /* Probably could be 65535L. */
+ FAIL("regexp too big");
+
+ /* Allocate space. */
+ r = (regexp *)BJAM_MALLOC(sizeof(regexp) + (unsigned)regsize);
+ if (r == NULL)
+ FAIL("out of space");
+
+ /* Second pass: emit code. */
+ regparse = (char *)exp;
+ regnpar = 1;
+ regcode = r->program;
+ regc(MAGIC);
+ if (reg(0, &flags) == NULL)
+ return(NULL);
+
+ /* Dig out information for optimizations. */
+ r->regstart = '\0'; /* Worst-case defaults. */
+ r->reganch = 0;
+ r->regmust = NULL;
+ r->regmlen = 0;
+ scan = r->program+1; /* First BRANCH. */
+ if (OP(regnext(scan)) == END) { /* Only one top-level choice. */
+ scan = OPERAND(scan);
+
+ /* Starting-point info. */
+ if (OP(scan) == EXACTLY)
+ r->regstart = *OPERAND(scan);
+ else if (OP(scan) == BOL)
+ r->reganch++;
+
+ /*
+ * If there's something expensive in the r.e., find the
+ * longest literal string that must appear and make it the
+ * regmust. Resolve ties in favor of later strings, since
+ * the regstart check works with the beginning of the r.e.
+ * and avoiding duplication strengthens checking. Not a
+ * strong reason, but sufficient in the absence of others.
+ */
+ if (flags&SPSTART) {
+ longest = NULL;
+ len = 0;
+ for (; scan != NULL; scan = regnext(scan))
+ if (OP(scan) == EXACTLY && strlen(OPERAND(scan)) >= len) {
+ longest = OPERAND(scan);
+ len = strlen(OPERAND(scan));
+ }
+ r->regmust = longest;
+ r->regmlen = len;
+ }
+ }
+
+ return(r);
+}
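
regcomp() above sidesteps a growable code buffer by running reg() twice: first with regcode aimed at regdummy so regnode()/regc() only bump regsize, then again into a buffer of exactly that size. A standalone sketch of the same measure-then-emit idea, using snprintf() as the "emitter" (unrelated to the regexp code itself, purely to show the two-pass shape):

#include <stdio.h>
#include <stdlib.h>

/* Write "x=0,x=1,..." into out (if non-NULL) and return the length needed. */
static size_t emit( char * out, size_t cap, int count )
{
    size_t used = 0;
    for ( int n = 0; n < count; ++n )
        used += (size_t)snprintf( out ? out + used : NULL,
                                  out ? cap - used : 0,
                                  "%sx=%d", n ? "," : "", n );
    return used;
}

int main( void )
{
    int count = 4;

    size_t need = emit( NULL, 0, count );   /* pass 1: measure only */
    char * buf  = malloc( need + 1 );       /* one allocation, exact size */
    if ( !buf )
        return 1;
    emit( buf, need + 1, count );           /* pass 2: emit for real */

    printf( "%s\n", buf );                  /* x=0,x=1,x=2,x=3 */
    free( buf );
    return 0;
}
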
+
+/*
+ - reg - regular expression, i.e. main body or parenthesized thing
+ *
+ * Caller must absorb opening parenthesis.
+ *
+ * Combining parenthesis handling with the base level of regular expression
+ * is a trifle forced, but the need to tie the tails of the branches to what
+ * follows makes it hard to avoid.
+ */
+static char *
+reg(
+ int paren, /* Parenthesized? */
+ int *flagp )
+{
+ register char *ret;
+ register char *br;
+ register char *ender;
+ register int parno = 0;
+ int flags;
+
+ *flagp = HASWIDTH; /* Tentatively. */
+
+ /* Make an OPEN node, if parenthesized. */
+ if (paren) {
+ if (regnpar >= NSUBEXP)
+ FAIL("too many ()");
+ parno = regnpar;
+ regnpar++;
+ ret = regnode(OPEN+parno);
+ } else
+ ret = NULL;
+
+ /* Pick up the branches, linking them together. */
+ br = regbranch(&flags);
+ if (br == NULL)
+ return(NULL);
+ if (ret != NULL)
+ regtail(ret, br); /* OPEN -> first. */
+ else
+ ret = br;
+ if (!(flags&HASWIDTH))
+ *flagp &= ~HASWIDTH;
+ *flagp |= flags&SPSTART;
+ while (*regparse == '|' || *regparse == '\n') {
+ regparse++;
+ br = regbranch(&flags);
+ if (br == NULL)
+ return(NULL);
+ regtail(ret, br); /* BRANCH -> BRANCH. */
+ if (!(flags&HASWIDTH))
+ *flagp &= ~HASWIDTH;
+ *flagp |= flags&SPSTART;
+ }
+
+ /* Make a closing node, and hook it on the end. */
+ ender = regnode((paren) ? CLOSE+parno : END);
+ regtail(ret, ender);
+
+ /* Hook the tails of the branches to the closing node. */
+ for (br = ret; br != NULL; br = regnext(br))
+ regoptail(br, ender);
+
+ /* Check for proper termination. */
+ if (paren && *regparse++ != ')') {
+ FAIL("unmatched ()");
+ } else if (!paren && *regparse != '\0') {
+ if (*regparse == ')') {
+ FAIL("unmatched ()");
+ } else
+ FAIL("junk on end"); /* "Can't happen". */
+ /* NOTREACHED */
+ }
+
+ return(ret);
+}
+
+/*
+ - regbranch - one alternative of an | operator
+ *
+ * Implements the concatenation operator.
+ */
+static char *
+regbranch( int *flagp )
+{
+ register char *ret;
+ register char *chain;
+ register char *latest;
+ int flags;
+
+ *flagp = WORST; /* Tentatively. */
+
+ ret = regnode(BRANCH);
+ chain = NULL;
+ while (*regparse != '\0' && *regparse != ')' &&
+ *regparse != '\n' && *regparse != '|') {
+ latest = regpiece(&flags);
+ if (latest == NULL)
+ return(NULL);
+ *flagp |= flags&HASWIDTH;
+ if (chain == NULL) /* First piece. */
+ *flagp |= flags&SPSTART;
+ else
+ regtail(chain, latest);
+ chain = latest;
+ }
+ if (chain == NULL) /* Loop ran zero times. */
+ (void) regnode(NOTHING);
+
+ return(ret);
+}
+
+/*
+ - regpiece - something followed by possible [*+?]
+ *
+ * Note that the branching code sequences used for ? and the general cases
+ * of * and + are somewhat optimized: they use the same NOTHING node as
+ * both the endmarker for their branch list and the body of the last branch.
+ * It might seem that this node could be dispensed with entirely, but the
+ * endmarker role is not redundant.
+ */
+static char *
+regpiece( int *flagp )
+{
+ register char *ret;
+ register char op;
+ register char *next;
+ int flags;
+
+ ret = regatom(&flags);
+ if (ret == NULL)
+ return(NULL);
+
+ op = *regparse;
+ if (!ISMULT(op)) {
+ *flagp = flags;
+ return(ret);
+ }
+
+ if (!(flags&HASWIDTH) && op != '?')
+ FAIL("*+ operand could be empty");
+ *flagp = (op != '+') ? (WORST|SPSTART) : (WORST|HASWIDTH);
+
+ if (op == '*' && (flags&SIMPLE))
+ reginsert(STAR, ret);
+ else if (op == '*') {
+ /* Emit x* as (x&|), where & means "self". */
+ reginsert(BRANCH, ret); /* Either x */
+ regoptail(ret, regnode(BACK)); /* and loop */
+ regoptail(ret, ret); /* back */
+ regtail(ret, regnode(BRANCH)); /* or */
+ regtail(ret, regnode(NOTHING)); /* null. */
+ } else if (op == '+' && (flags&SIMPLE))
+ reginsert(PLUS, ret);
+ else if (op == '+') {
+ /* Emit x+ as x(&|), where & means "self". */
+ next = regnode(BRANCH); /* Either */
+ regtail(ret, next);
+ regtail(regnode(BACK), ret); /* loop back */
+ regtail(next, regnode(BRANCH)); /* or */
+ regtail(ret, regnode(NOTHING)); /* null. */
+ } else if (op == '?') {
+ /* Emit x? as (x|) */
+ reginsert(BRANCH, ret); /* Either x */
+ regtail(ret, regnode(BRANCH)); /* or */
+ next = regnode(NOTHING); /* null. */
+ regtail(ret, next);
+ regoptail(ret, next);
+ }
+ regparse++;
+ if (ISMULT(*regparse))
+ FAIL("nested *?+");
+
+ return(ret);
+}
+
+/*
+ - regatom - the lowest level
+ *
+ * Optimization: gobbles an entire sequence of ordinary characters so that
+ * it can turn them into a single node, which is smaller to store and
+ * faster to run. Backslashed characters are exceptions, each becoming a
+ * separate node; the code is simpler that way and it's not worth fixing.
+ */
+static char *
+regatom( int *flagp )
+{
+ register char *ret;
+ int flags;
+
+ *flagp = WORST; /* Tentatively. */
+
+ switch (*regparse++) {
+ /* FIXME: these chars only have meaning at beg/end of pat? */
+ case '^':
+ ret = regnode(BOL);
+ break;
+ case '$':
+ ret = regnode(EOL);
+ break;
+ case '.':
+ ret = regnode(ANY);
+ *flagp |= HASWIDTH|SIMPLE;
+ break;
+ case '[': {
+ register int classr;
+ register int classend;
+
+ if (*regparse == '^') { /* Complement of range. */
+ ret = regnode(ANYBUT);
+ regparse++;
+ } else
+ ret = regnode(ANYOF);
+ if (*regparse == ']' || *regparse == '-')
+ regc(*regparse++);
+ while (*regparse != '\0' && *regparse != ']') {
+ if (*regparse == '-') {
+ regparse++;
+ if (*regparse == ']' || *regparse == '\0')
+ regc('-');
+ else {
+ classr = UCHARAT(regparse-2)+1;
+ classend = UCHARAT(regparse);
+ if (classr > classend+1)
+ FAIL("invalid [] range");
+ for (; classr <= classend; classr++)
+ regc(classr);
+ regparse++;
+ }
+ } else
+ regc(*regparse++);
+ }
+ regc('\0');
+ if (*regparse != ']')
+ FAIL("unmatched []");
+ regparse++;
+ *flagp |= HASWIDTH|SIMPLE;
+ }
+ break;
+ case '(':
+ ret = reg(1, &flags);
+ if (ret == NULL)
+ return(NULL);
+ *flagp |= flags&(HASWIDTH|SPSTART);
+ break;
+ case '\0':
+ case '|':
+ case '\n':
+ case ')':
+ FAIL("internal urp"); /* Supposed to be caught earlier. */
+ break;
+ case '?':
+ case '+':
+ case '*':
+ FAIL("?+* follows nothing");
+ break;
+ case '\\':
+ switch (*regparse++) {
+ case '\0':
+ FAIL("trailing \\");
+ break;
+ case '<':
+ ret = regnode(WORDA);
+ break;
+ case '>':
+ ret = regnode(WORDZ);
+ break;
+ /* FIXME: Someday handle \1, \2, ... */
+ default:
+ /* Handle general quoted chars in exact-match routine */
+ goto de_fault;
+ }
+ break;
+ de_fault:
+ default:
+ /*
+ * Encode a string of characters to be matched exactly.
+ *
+ * This is a bit tricky due to quoted chars and due to
+ * '*', '+', and '?' taking the SINGLE char previous
+ * as their operand.
+ *
+ * On entry, the char at regparse[-1] is going to go
+ * into the string, no matter what it is. (It could be
+ * following a \ if we are entered from the '\' case.)
+ *
+ * Basic idea is to pick up a good char in ch and
+ * examine the next char. If it's *+? then we twiddle.
+ * If it's \ then we frozzle. If it's other magic char
+ * we push ch and terminate the string. If none of the
+ * above, we push ch on the string and go around again.
+ *
+ * regprev is used to remember where "the current char"
+ * starts in the string, if due to a *+? we need to back
+ * up and put the current char in a separate, 1-char, string.
+ * When regprev is NULL, ch is the only char in the
+ * string; this is used in *+? handling, and in setting
+ * flags |= SIMPLE at the end.
+ */
+ {
+ char *regprev;
+ register char ch;
+
+ regparse--; /* Look at cur char */
+ ret = regnode(EXACTLY);
+ for ( regprev = 0 ; ; ) {
+ ch = *regparse++; /* Get current char */
+ switch (*regparse) { /* look at next one */
+
+ default:
+ regc(ch); /* Add cur to string */
+ break;
+
+ case '.': case '[': case '(':
+ case ')': case '|': case '\n':
+ case '$': case '^':
+ case '\0':
+ /* FIXME, $ and ^ should not always be magic */
+ magic:
+ regc(ch); /* dump cur char */
+ goto done; /* and we are done */
+
+ case '?': case '+': case '*':
+ if (!regprev) /* If just ch in str, */
+ goto magic; /* use it */
+ /* End mult-char string one early */
+ regparse = regprev; /* Back up parse */
+ goto done;
+
+ case '\\':
+ regc(ch); /* Cur char OK */
+ switch (regparse[1]){ /* Look after \ */
+ case '\0':
+ case '<':
+ case '>':
+ /* FIXME: Someday handle \1, \2, ... */
+ goto done; /* Not quoted */
+ default:
+ /* Backup point is \, scan * point is after it. */
+ regprev = regparse;
+ regparse++;
+ continue; /* NOT break; */
+ }
+ }
+ regprev = regparse; /* Set backup point */
+ }
+ done:
+ regc('\0');
+ *flagp |= HASWIDTH;
+ if (!regprev) /* One char? */
+ *flagp |= SIMPLE;
+ }
+ break;
+ }
+
+ return(ret);
+}
+
+/*
+ - regnode - emit a node
+ */
+static char * /* Location. */
+regnode( int op )
+{
+ register char *ret;
+ register char *ptr;
+
+ ret = regcode;
+ if (ret == &regdummy) {
+ regsize += 3;
+ return(ret);
+ }
+
+ ptr = ret;
+ *ptr++ = op;
+ *ptr++ = '\0'; /* Null "next" pointer. */
+ *ptr++ = '\0';
+ regcode = ptr;
+
+ return(ret);
+}
+
+/*
+ - regc - emit (if appropriate) a byte of code
+ */
+static void
+regc( int b )
+{
+ if (regcode != &regdummy)
+ *regcode++ = b;
+ else
+ regsize++;
+}
+
+/*
+ - reginsert - insert an operator in front of already-emitted operand
+ *
+ * Means relocating the operand.
+ */
+static void
+reginsert(
+ char op,
+ char *opnd )
+{
+ register char *src;
+ register char *dst;
+ register char *place;
+
+ if (regcode == &regdummy) {
+ regsize += 3;
+ return;
+ }
+
+ src = regcode;
+ regcode += 3;
+ dst = regcode;
+ while (src > opnd)
+ *--dst = *--src;
+
+ place = opnd; /* Op node, where operand used to be. */
+ *place++ = op;
+ *place++ = '\0';
+ *place++ = '\0';
+}
+
+/*
+ - regtail - set the next-pointer at the end of a node chain
+ */
+static void
+regtail(
+ char *p,
+ char *val )
+{
+ register char *scan;
+ register char *temp;
+ register int offset;
+
+ if (p == &regdummy)
+ return;
+
+ /* Find last node. */
+ scan = p;
+ for (;;) {
+ temp = regnext(scan);
+ if (temp == NULL)
+ break;
+ scan = temp;
+ }
+
+ if (OP(scan) == BACK)
+ offset = scan - val;
+ else
+ offset = val - scan;
+ *(scan+1) = (offset>>8)&0377;
+ *(scan+2) = offset&0377;
+}
+
+/*
+ - regoptail - regtail on operand of first argument; nop if operandless
+ */
+
+static void
+regoptail(
+ char *p,
+ char *val )
+{
+ /* "Operandless" and "op != BRANCH" are synonymous in practice. */
+ if (p == NULL || p == &regdummy || OP(p) != BRANCH)
+ return;
+ regtail(OPERAND(p), val);
+}
+
+/*
+ * regexec and friends
+ */
+
+/*
+ * Global work variables for regexec().
+ */
+static char *reginput; /* String-input pointer. */
+static char *regbol; /* Beginning of input, for ^ check. */
+static char **regstartp; /* Pointer to startp array. */
+static char **regendp; /* Ditto for endp. */
+
+/*
+ * Forwards.
+ */
+STATIC int regtry( regexp *prog, char *string );
+STATIC int regmatch( char *prog );
+STATIC int regrepeat( char *p );
+
+#ifdef DEBUG
+int regnarrate = 0;
+void regdump();
+STATIC char *regprop();
+#endif
+
+/*
+ - regexec - match a regexp against a string
+ */
+int
+regexec(
+ register regexp *prog,
+ register char *string )
+{
+ register char *s;
+
+ /* Be paranoid... */
+ if (prog == NULL || string == NULL) {
+ regerror("NULL parameter");
+ return(0);
+ }
+
+ /* Check validity of program. */
+ if (UCHARAT(prog->program) != MAGIC) {
+ regerror("corrupted program");
+ return(0);
+ }
+
+ /* If there is a "must appear" string, look for it. */
+ if ( prog->regmust != NULL )
+ {
+ s = (char *)string;
+ while ( ( s = strchr( s, prog->regmust[ 0 ] ) ) != NULL )
+ {
+ if ( !strncmp( s, prog->regmust, prog->regmlen ) )
+ break; /* Found it. */
+ ++s;
+ }
+ if ( s == NULL ) /* Not present. */
+ return 0;
+ }
+
+ /* Mark beginning of line for ^ . */
+ regbol = (char *)string;
+
+ /* Simplest case: anchored match need be tried only once. */
+ if ( prog->reganch )
+ return regtry( prog, string );
+
+ /* Messy cases: unanchored match. */
+ s = (char *)string;
+ if (prog->regstart != '\0')
+ /* We know what char it must start with. */
+ while ((s = strchr(s, prog->regstart)) != NULL) {
+ if (regtry(prog, s))
+ return(1);
+ s++;
+ }
+ else
+ /* We do not -- general case. */
+ do {
+ if ( regtry( prog, s ) )
+ return( 1 );
+ } while ( *s++ != '\0' );
+
+ /* Failure. */
+ return 0;
+}
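
Before any real matching starts, regexec() above rejects strings that cannot contain the regmust literal and, when regstart is known, only tries positions where that first character occurs. A standalone sketch of the same prefilter idea, with strstr() standing in for the regmust scan and a trivial matcher standing in for regtry() (the literal "abc" is an arbitrary illustration):

#include <stdio.h>
#include <string.h>

/* Stand-in for an expensive full match attempt starting at s. */
static int slow_match_here( const char * s )
{
    return strncmp( s, "abc123", 6 ) == 0;
}

/* Only strings containing the literal "abc" can possibly match, so reject
 * the rest with strstr() before doing any real matching work -- the same
 * role regmust/regmlen play in regexec() above. */
static int match_anywhere( const char * line )
{
    if ( strstr( line, "abc" ) == NULL )
        return 0;                           /* fast rejection */
    for ( ; *line; ++line )
        if ( slow_match_here( line ) )
            return 1;
    return 0;
}

int main( void )
{
    printf( "%d\n", match_anywhere( "xxabc123yy" ) );     /* 1 */
    printf( "%d\n", match_anywhere( "no digits here" ) ); /* 0, rejected early */
    return 0;
}
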
+
+
+/*
+ * regtry() - try match at specific point.
+ */
+
+static int /* 0 failure, 1 success */
+regtry(
+ regexp *prog,
+ char *string )
+{
+ register int i;
+ register char * * sp;
+ register char * * ep;
+
+ reginput = string;
+ regstartp = prog->startp;
+ regendp = prog->endp;
+
+ sp = prog->startp;
+ ep = prog->endp;
+ for ( i = NSUBEXP; i > 0; --i )
+ {
+ *sp++ = NULL;
+ *ep++ = NULL;
+ }
+ if ( regmatch( prog->program + 1 ) )
+ {
+ prog->startp[ 0 ] = string;
+ prog->endp[ 0 ] = reginput;
+ return 1;
+ }
+ else
+ return 0;
+}
+
+
+/*
+ * regmatch() - main matching routine.
+ *
+ * Conceptually the strategy is simple: check to see whether the current node
+ * matches, call self recursively to see whether the rest matches, and then act
+ * accordingly. In practice we make some effort to avoid recursion, in
+ * particular by going through "ordinary" nodes (that do not need to know
+ * whether the rest of the match failed) by a loop instead of by recursion.
+ */
+
+static int /* 0 failure, 1 success */
+regmatch( char * prog )
+{
+ char * scan; /* Current node. */
+ char * next; /* Next node. */
+
+ scan = prog;
+#ifdef DEBUG
+ if (scan != NULL && regnarrate)
+ fprintf(stderr, "%s(\n", regprop(scan));
+#endif
+ while (scan != NULL) {
+#ifdef DEBUG
+ if (regnarrate)
+ fprintf(stderr, "%s...\n", regprop(scan));
+#endif
+ next = regnext(scan);
+
+ switch (OP(scan)) {
+ case BOL:
+ if (reginput != regbol)
+ return(0);
+ break;
+ case EOL:
+ if (*reginput != '\0')
+ return(0);
+ break;
+ case WORDA:
+ /* Must be looking at a letter, digit, or _ */
+ if ((!isalnum(*reginput)) && *reginput != '_')
+ return(0);
+ /* Prev must be BOL or nonword */
+ if (reginput > regbol &&
+ (isalnum(reginput[-1]) || reginput[-1] == '_'))
+ return(0);
+ break;
+ case WORDZ:
+ /* Must be looking at a non-word char: not a letter, digit, or _ */
+ if (isalnum(*reginput) || *reginput == '_')
+ return(0);
+ /* We don't care what the previous char was */
+ break;
+ case ANY:
+ if (*reginput == '\0')
+ return(0);
+ reginput++;
+ break;
+ case EXACTLY: {
+ register int len;
+ register char *opnd;
+
+ opnd = OPERAND(scan);
+ /* Inline the first character, for speed. */
+ if (*opnd != *reginput)
+ return(0);
+ len = strlen(opnd);
+ if (len > 1 && strncmp(opnd, reginput, len) != 0)
+ return(0);
+ reginput += len;
+ }
+ break;
+ case ANYOF:
+ if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) == NULL)
+ return(0);
+ reginput++;
+ break;
+ case ANYBUT:
+ if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) != NULL)
+ return(0);
+ reginput++;
+ break;
+ case NOTHING:
+ break;
+ case BACK:
+ break;
+ case OPEN+1:
+ case OPEN+2:
+ case OPEN+3:
+ case OPEN+4:
+ case OPEN+5:
+ case OPEN+6:
+ case OPEN+7:
+ case OPEN+8:
+ case OPEN+9: {
+ register int no;
+ register char *save;
+
+ no = OP(scan) - OPEN;
+ save = reginput;
+
+ if (regmatch(next)) {
+ /*
+ * Don't set startp if some later
+ * invocation of the same parentheses
+ * already has.
+ */
+ if (regstartp[no] == NULL)
+ regstartp[no] = save;
+ return(1);
+ } else
+ return(0);
+ }
+ break;
+ case CLOSE+1:
+ case CLOSE+2:
+ case CLOSE+3:
+ case CLOSE+4:
+ case CLOSE+5:
+ case CLOSE+6:
+ case CLOSE+7:
+ case CLOSE+8:
+ case CLOSE+9: {
+ register int no;
+ register char *save;
+
+ no = OP(scan) - CLOSE;
+ save = reginput;
+
+ if (regmatch(next)) {
+ /*
+ * Don't set endp if some later
+ * invocation of the same parentheses
+ * already has.
+ */
+ if (regendp[no] == NULL)
+ regendp[no] = save;
+ return(1);
+ } else
+ return(0);
+ }
+ break;
+ case BRANCH: {
+ register char *save;
+
+ if (OP(next) != BRANCH) /* No choice. */
+ next = OPERAND(scan); /* Avoid recursion. */
+ else {
+ do {
+ save = reginput;
+ if (regmatch(OPERAND(scan)))
+ return(1);
+ reginput = save;
+ scan = regnext(scan);
+ } while (scan != NULL && OP(scan) == BRANCH);
+ return(0);
+ /* NOTREACHED */
+ }
+ }
+ break;
+ case STAR:
+ case PLUS: {
+ register char nextch;
+ register int no;
+ register char *save;
+ register int min;
+
+ /*
+ * Lookahead to avoid useless match attempts
+ * when we know what character comes next.
+ */
+ nextch = '\0';
+ if (OP(next) == EXACTLY)
+ nextch = *OPERAND(next);
+ min = (OP(scan) == STAR) ? 0 : 1;
+ save = reginput;
+ no = regrepeat(OPERAND(scan));
+ while (no >= min) {
+ /* If it could work, try it. */
+ if (nextch == '\0' || *reginput == nextch)
+ if (regmatch(next))
+ return(1);
+ /* Couldn't or didn't -- back up. */
+ no--;
+ reginput = save + no;
+ }
+ return(0);
+ }
+ break;
+ case END:
+ return(1); /* Success! */
+ break;
+ default:
+ regerror("memory corruption");
+ return(0);
+ break;
+ }
+
+ scan = next;
+ }
+
+ /*
+ * We get here only if there's trouble -- normally "case END" is
+ * the terminating point.
+ */
+ regerror("corrupted pointers");
+ return(0);
+}
+
+/*
+ - regrepeat - repeatedly match something simple, report how many
+ */
+static int
+regrepeat( char *p )
+{
+ register int count = 0;
+ register char *scan;
+ register char *opnd;
+
+ scan = reginput;
+ opnd = OPERAND(p);
+ switch (OP(p)) {
+ case ANY:
+ count = strlen(scan);
+ scan += count;
+ break;
+ case EXACTLY:
+ while (*opnd == *scan) {
+ count++;
+ scan++;
+ }
+ break;
+ case ANYOF:
+ while (*scan != '\0' && strchr(opnd, *scan) != NULL) {
+ count++;
+ scan++;
+ }
+ break;
+ case ANYBUT:
+ while (*scan != '\0' && strchr(opnd, *scan) == NULL) {
+ count++;
+ scan++;
+ }
+ break;
+ default: /* Oh dear. Called inappropriately. */
+ regerror("internal foulup");
+ count = 0; /* Best compromise. */
+ break;
+ }
+ reginput = scan;
+
+ return(count);
+}
+
+/*
+ - regnext - dig the "next" pointer out of a node
+ */
+static char *
+regnext( register char *p )
+{
+ register int offset;
+
+ if (p == &regdummy)
+ return(NULL);
+
+ offset = NEXT(p);
+ if (offset == 0)
+ return(NULL);
+
+ if (OP(p) == BACK)
+ return(p-offset);
+ else
+ return(p+offset);
+}
+
+#ifdef DEBUG
+
+STATIC char *regprop();
+
+/*
+ - regdump - dump a regexp onto stdout in vaguely comprehensible form
+ */
+void
+regdump( regexp *r )
+{
+ register char *s;
+ register char op = EXACTLY; /* Arbitrary non-END op. */
+ register char *next;
+
+
+ s = r->program + 1;
+ while (op != END) { /* While that wasn't END last time... */
+ op = OP(s);
+ printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */
+ next = regnext(s);
+ if (next == NULL) /* Next ptr. */
+ printf("(0)");
+ else
+ printf("(%d)", (s-r->program)+(next-s));
+ s += 3;
+ if (op == ANYOF || op == ANYBUT || op == EXACTLY) {
+ /* Literal string, where present. */
+ while (*s != '\0') {
+ putchar(*s);
+ s++;
+ }
+ s++;
+ }
+ putchar('\n');
+ }
+
+ /* Header fields of interest. */
+ if (r->regstart != '\0')
+ printf("start `%c' ", r->regstart);
+ if (r->reganch)
+ printf("anchored ");
+ if (r->regmust != NULL)
+ printf("must have \"%s\"", r->regmust);
+ printf("\n");
+}
+
+/*
+ - regprop - printable representation of opcode
+ */
+static char *
+regprop( char *op )
+{
+ register char *p;
+ static char buf[50];
+
+ (void) strcpy(buf, ":");
+
+ switch (OP(op)) {
+ case BOL:
+ p = "BOL";
+ break;
+ case EOL:
+ p = "EOL";
+ break;
+ case ANY:
+ p = "ANY";
+ break;
+ case ANYOF:
+ p = "ANYOF";
+ break;
+ case ANYBUT:
+ p = "ANYBUT";
+ break;
+ case BRANCH:
+ p = "BRANCH";
+ break;
+ case EXACTLY:
+ p = "EXACTLY";
+ break;
+ case NOTHING:
+ p = "NOTHING";
+ break;
+ case BACK:
+ p = "BACK";
+ break;
+ case END:
+ p = "END";
+ break;
+ case OPEN+1:
+ case OPEN+2:
+ case OPEN+3:
+ case OPEN+4:
+ case OPEN+5:
+ case OPEN+6:
+ case OPEN+7:
+ case OPEN+8:
+ case OPEN+9:
+ sprintf(buf+strlen(buf), "OPEN%d", OP(op)-OPEN);
+ p = NULL;
+ break;
+ case CLOSE+1:
+ case CLOSE+2:
+ case CLOSE+3:
+ case CLOSE+4:
+ case CLOSE+5:
+ case CLOSE+6:
+ case CLOSE+7:
+ case CLOSE+8:
+ case CLOSE+9:
+ sprintf(buf+strlen(buf), "CLOSE%d", OP(op)-CLOSE);
+ p = NULL;
+ break;
+ case STAR:
+ p = "STAR";
+ break;
+ case PLUS:
+ p = "PLUS";
+ break;
+ case WORDA:
+ p = "WORDA";
+ break;
+ case WORDZ:
+ p = "WORDZ";
+ break;
+ default:
+ regerror("corrupted opcode");
+ break;
+ }
+ if (p != NULL)
+ (void) strcat(buf, p);
+ return(buf);
+}
+#endif
+
+/*
+ * The following is provided for those people who do not have strcspn() in
+ * their C libraries. They should get off their butts and do something
+ * about it; at least one public-domain implementation of those (highly
+ * useful) string routines has been published on Usenet.
+ */
+#ifdef STRCSPN
+/*
+ * strcspn - find length of initial segment of s1 consisting entirely
+ * of characters not from s2
+ */
+
+static int
+strcspn(
+ char *s1,
+ char *s2 )
+{
+ register char *scan1;
+ register char *scan2;
+ register int count;
+
+ count = 0;
+ for (scan1 = s1; *scan1 != '\0'; scan1++) {
+ for (scan2 = s2; *scan2 != '\0';) /* ++ moved down. */
+ if (*scan1 == *scan2++)
+ return(count);
+ count++;
+ }
+ return(count);
+}
+#endif
diff --git a/jam-files/engine/regexp.h b/jam-files/engine/regexp.h
new file mode 100644
index 000000000..9d4604f60
--- /dev/null
+++ b/jam-files/engine/regexp.h
@@ -0,0 +1,32 @@
+/*
+ * Definitions etc. for regexp(3) routines.
+ *
+ * Caveat: this is V8 regexp(3) [actually, a reimplementation thereof],
+ * not the System V one.
+ */
+#ifndef REGEXP_DWA20011023_H
+# define REGEXP_DWA20011023_H
+
+#define NSUBEXP 10
+typedef struct regexp {
+ char *startp[NSUBEXP];
+ char *endp[NSUBEXP];
+ char regstart; /* Internal use only. */
+ char reganch; /* Internal use only. */
+ char *regmust; /* Internal use only. */
+ int regmlen; /* Internal use only. */
+ char program[1]; /* Unwarranted chumminess with compiler. */
+} regexp;
+
+regexp *regcomp( char *exp );
+int regexec( regexp *prog, char *string );
+void regerror( char *s );
+
+/*
+ * The first byte of the regexp internal "program" is actually this magic
+ * number; the start node begins in the second byte.
+ */
+#define MAGIC 0234
+
+#endif
+
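
Putting the two files together, a caller compiles with regcomp(), tests with regexec(), and then reads submatch n from the startp[n]/endp[n] pointer pairs; group 0 is the whole match. regerror() is only declared in regexp.h, so a standalone program has to supply its own. A minimal sketch, assuming regexp.h is on the include path and that BJAM_MALLOC in this build is plain malloc() (so the compiled program can be released with free()):

#include <stdio.h>
#include <stdlib.h>
#include "regexp.h"

/* regexp.h declares regerror() but leaves the definition to the host program. */
void regerror( char * s )
{
    fprintf( stderr, "regexp error: %s\n", s );
    exit( 1 );
}

int main( void )
{
    regexp * re = regcomp( "(foo|bar)[0-9]+" );
    char * text = "see bar42 here";

    if ( regexec( re, text ) )
    {
        /* Group 0 is the whole match; group 1 is the first parenthesized one. */
        printf( "matched: %.*s\n",
                (int)( re->endp[ 0 ] - re->startp[ 0 ] ), re->startp[ 0 ] );
        printf( "group 1: %.*s\n",
                (int)( re->endp[ 1 ] - re->startp[ 1 ] ), re->startp[ 1 ] );
    }
    free( re );   /* assumes BJAM_MALLOC is plain malloc() in this build */
    return 0;
}
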
diff --git a/jam-files/engine/rules.c b/jam-files/engine/rules.c
new file mode 100644
index 000000000..a0be1d340
--- /dev/null
+++ b/jam-files/engine/rules.c
@@ -0,0 +1,810 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "lists.h"
+# include "parse.h"
+# include "variable.h"
+# include "rules.h"
+# include "newstr.h"
+# include "hash.h"
+# include "modules.h"
+# include "search.h"
+# include "lists.h"
+# include "pathsys.h"
+# include "timestamp.h"
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * rules.c - access to RULEs, TARGETs, and ACTIONs
+ *
+ * External routines:
+ *
+ * bindrule() - return pointer to RULE, creating it if necessary.
+ * bindtarget() - return pointer to TARGET, creating it if necessary.
+ * touch_target() - mark a target to simulate being new.
+ * targetlist() - turn list of target names into a TARGET chain.
+ * targetentry() - add a TARGET to a chain of TARGETS.
+ * actionlist() - append to an ACTION chain.
+ * addsettings() - add a deferred "set" command to a target.
+ * pushsettings() - set all target specific variables.
+ * popsettings() - reset target specific variables to their pre-push values.
+ * freesettings() - delete a settings list.
+ * rules_done() - free RULE and TARGET tables.
+ *
+ * 04/12/94 (seiwald) - actionlist() now just appends a single action.
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ */
+
+static void set_rule_actions( RULE *, rule_actions * );
+static void set_rule_body ( RULE *, argument_list *, PARSE * procedure );
+
+static struct hash * targethash = 0;
+
+struct _located_target
+{
+ char * file_name;
+ TARGET * target;
+};
+typedef struct _located_target LOCATED_TARGET ;
+
+static struct hash * located_targets = 0;
+
+
+/*
+ * target_include() - adds the 'included' TARGET to the list of targets included
+ * by the 'including' TARGET. Such targets are modeled as dependencies of the
+ * internal include node belonging to the 'including' TARGET.
+ */
+
+void target_include( TARGET * including, TARGET * included )
+{
+ TARGET * internal;
+ if ( !including->includes )
+ {
+ including->includes = copytarget( including );
+ including->includes->original_target = including;
+ }
+ internal = including->includes;
+ internal->depends = targetentry( internal->depends, included );
+}
+
+
+/*
+ * enter_rule() - return pointer to RULE, creating it if necessary in
+ * target_module.
+ */
+
+static RULE * enter_rule( char * rulename, module_t * target_module )
+{
+ RULE rule;
+ RULE * r = &rule;
+
+ r->name = rulename;
+
+ if ( hashenter( demand_rules( target_module ), (HASHDATA * *)&r ) )
+ {
+ r->name = newstr( rulename ); /* never freed */
+ r->procedure = (PARSE *)0;
+ r->module = 0;
+ r->actions = 0;
+ r->arguments = 0;
+ r->exported = 0;
+ r->module = target_module;
+#ifdef HAVE_PYTHON
+ r->python_function = 0;
+#endif
+ }
+ return r;
+}
+
+
+/*
+ * define_rule() - return pointer to RULE, creating it if necessary in
+ * target_module. Prepare it to accept a body or action originating in
+ * src_module.
+ */
+
+static RULE * define_rule
+(
+ module_t * src_module,
+ char * rulename,
+ module_t * target_module
+)
+{
+ RULE * r = enter_rule( rulename, target_module );
+ if ( r->module != src_module ) /* if the rule was imported from elsewhere, clear it now */
+ {
+ set_rule_body( r, 0, 0 );
+ set_rule_actions( r, 0 );
+ r->module = src_module; /* r will be executed in the source module */
+ }
+ return r;
+}
+
+
+void rule_free( RULE * r )
+{
+ freestr( r->name );
+ r->name = "";
+ parse_free( r->procedure );
+ r->procedure = 0;
+ if ( r->arguments )
+ args_free( r->arguments );
+ r->arguments = 0;
+ if ( r->actions )
+ actions_free( r->actions );
+ r->actions = 0;
+}
+
+
+/*
+ * bindtarget() - return pointer to TARGET, creating it if necessary.
+ */
+
+TARGET * bindtarget( char const * target_name )
+{
+ TARGET target;
+ TARGET * t = &target;
+
+ if ( !targethash )
+ targethash = hashinit( sizeof( TARGET ), "targets" );
+
+ /* Perforce added const everywhere. No time to merge that change. */
+#ifdef NT
+ target_name = short_path_to_long_path( (char *)target_name );
+#endif
+ t->name = (char *)target_name;
+
+ if ( hashenter( targethash, (HASHDATA * *)&t ) )
+ {
+ memset( (char *)t, '\0', sizeof( *t ) );
+ t->name = newstr( (char *)target_name ); /* never freed */
+ t->boundname = t->name; /* default for T_FLAG_NOTFILE */
+ }
+
+ return t;
+}
+
+
+static void bind_explicitly_located_target( void * xtarget, void * data )
+{
+ TARGET * t = (TARGET *)xtarget;
+ if ( !( t->flags & T_FLAG_NOTFILE ) )
+ {
+ /* Check if there's a setting for LOCATE */
+ SETTINGS * s = t->settings;
+ for ( ; s ; s = s->next )
+ {
+ if ( strcmp( s->symbol, "LOCATE" ) == 0 )
+ {
+ pushsettings( t->settings );
+ /* We are binding a target with explicit LOCATE. So third
+ * argument is of no use: nothing will be returned through it.
+ */
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( t->settings );
+ break;
+ }
+ }
+ }
+}
+
+
+void bind_explicitly_located_targets()
+{
+ if ( targethash )
+ hashenumerate( targethash, bind_explicitly_located_target, (void *)0 );
+}
+
+
+/* TODO: It is probably not a good idea to use functions in other modules like
+ this. */
+void call_bind_rule( char * target, char * boundname );
+
+
+TARGET * search_for_target ( char * name, LIST * search_path )
+{
+ PATHNAME f[1];
+ string buf[1];
+ LOCATED_TARGET lt;
+ LOCATED_TARGET * lta = &lt;
+ time_t time;
+ int found = 0;
+ TARGET * result;
+
+ string_new( buf );
+
+ path_parse( name, f );
+
+ f->f_grist.ptr = 0;
+ f->f_grist.len = 0;
+
+ while ( search_path )
+ {
+ f->f_root.ptr = search_path->string;
+ f->f_root.len = strlen( search_path->string );
+
+ string_truncate( buf, 0 );
+ path_build( f, buf, 1 );
+
+ lt.file_name = buf->value ;
+
+ if ( !located_targets )
+ located_targets = hashinit( sizeof(LOCATED_TARGET),
+ "located targets" );
+
+ if ( hashcheck( located_targets, (HASHDATA * *)&lta ) )
+ {
+ return lta->target;
+ }
+
+ timestamp( buf->value, &time );
+ if ( time )
+ {
+ found = 1;
+ break;
+ }
+
+ search_path = list_next( search_path );
+ }
+
+ if ( !found )
+ {
+ f->f_root.ptr = 0;
+ f->f_root.len = 0;
+
+ string_truncate( buf, 0 );
+ path_build( f, buf, 1 );
+
+ timestamp( buf->value, &time );
+ }
+
+ result = bindtarget( name );
+ result->boundname = newstr( buf->value );
+ result->time = time;
+ result->binding = time ? T_BIND_EXISTS : T_BIND_MISSING;
+
+ call_bind_rule( result->name, result->boundname );
+
+ string_free( buf );
+
+ return result;
+}
+
+
+/*
+ * copytarget() - make a new target with the old target's name.
+ *
+ * Not entered into hash table -- for internal nodes.
+ */
+
+TARGET * copytarget( const TARGET * ot )
+{
+ TARGET * t = (TARGET *)BJAM_MALLOC( sizeof( *t ) );
+ memset( (char *)t, '\0', sizeof( *t ) );
+ t->name = copystr( ot->name );
+ t->boundname = t->name;
+
+ t->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL;
+
+ return t;
+}
+
+
+/*
+ * touch_target() - mark a target to simulate being new.
+ */
+
+void touch_target( char * t )
+{
+ bindtarget( t )->flags |= T_FLAG_TOUCHED;
+}
+
+
+/*
+ * targetlist() - turn list of target names into a TARGET chain.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * targets list of target names
+ */
+
+TARGETS * targetlist( TARGETS * chain, LIST * target_names )
+{
+ for ( ; target_names; target_names = list_next( target_names ) )
+ chain = targetentry( chain, bindtarget( target_names->string ) );
+ return chain;
+}
+
+
+/*
+ * targetentry() - add a TARGET to a chain of TARGETS.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * target new target to append
+ */
+
+TARGETS * targetentry( TARGETS * chain, TARGET * target )
+{
+ TARGETS * c = (TARGETS *)BJAM_MALLOC( sizeof( TARGETS ) );
+ c->target = target;
+
+ if ( !chain ) chain = c;
+ else chain->tail->next = c;
+ chain->tail = c;
+ c->next = 0;
+
+ return chain;
+}
+
+
+/*
+ * targetchain() - append two TARGET chains.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * targets second TARGETS chain to append
+ */
+
+TARGETS * targetchain( TARGETS * chain, TARGETS * targets )
+{
+ if ( !targets ) return chain;
+ if ( !chain ) return targets;
+
+ chain->tail->next = targets;
+ chain->tail = targets->tail;
+
+ return chain;
+}
+
+/*
+ * actionlist() - append to an ACTION chain.
+ */
+
+ACTIONS * actionlist( ACTIONS * chain, ACTION * action )
+{
+ ACTIONS * actions = (ACTIONS *)BJAM_MALLOC( sizeof( ACTIONS ) );
+
+ actions->action = action;
+
+ if ( !chain ) chain = actions;
+ else chain->tail->next = actions;
+ chain->tail = actions;
+ actions->next = 0;
+
+ return chain;
+}
+
+static SETTINGS * settings_freelist;
+
+
+/*
+ * addsettings() - add a deferred "set" command to a target.
+ *
+ * Adds a variable setting (varname=list) onto a chain of settings for a
+ * particular target. 'flag' controls the relationship between new and old
+ * values in the same way as in var_set() function (see variable.c). Returns
+ * the head of the settings chain.
+ */
+
+SETTINGS * addsettings( SETTINGS * head, int flag, char * symbol, LIST * value )
+{
+ SETTINGS * v;
+
+ /* Look for previous settings. */
+ for ( v = head; v; v = v->next )
+ if ( !strcmp( v->symbol, symbol ) )
+ break;
+
+ /* If not previously set, alloc a new. */
+ /* If appending, do so. */
+ /* Else free old and set new. */
+ if ( !v )
+ {
+ v = settings_freelist;
+
+ if ( v )
+ settings_freelist = v->next;
+ else
+ v = (SETTINGS *)BJAM_MALLOC( sizeof( *v ) );
+
+ v->symbol = newstr( symbol );
+ v->value = value;
+ v->next = head;
+ v->multiple = 0;
+ head = v;
+ }
+ else if ( flag == VAR_APPEND )
+ {
+ v->value = list_append( v->value, value );
+ }
+ else if ( flag != VAR_DEFAULT )
+ {
+ list_free( v->value );
+ v->value = value;
+ }
+ else
+ list_free( value );
+
+ /* Return (new) head of list. */
+ return head;
+}
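
addsettings() above follows the var_set() flag semantics (see variable.c): VAR_APPEND extends the existing value list, a plain set replaces it, and VAR_DEFAULT only takes effect when the symbol has no previous setting. A simplified standalone analogue using plain strings instead of jam LISTs (the enum names below are stand-ins, not jam's VAR_* constants; cleanup of the list is omitted):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

enum { SET_REPLACE, SET_APPEND, SET_DEFAULT };   /* stand-ins for VAR_* */

struct setting
{
    struct setting * next;
    char           * symbol;
    char           * value;
};

static struct setting * add_setting( struct setting * head, int flag,
                                     const char * symbol, const char * value )
{
    struct setting * v;
    for ( v = head; v; v = v->next )
        if ( !strcmp( v->symbol, symbol ) )
            break;

    if ( !v )                                    /* not set yet: new head node */
    {
        v = malloc( sizeof( *v ) );
        v->symbol = strdup( symbol );
        v->value  = strdup( value );
        v->next   = head;
        return v;
    }
    if ( flag == SET_APPEND )                    /* extend the existing value */
    {
        char * joined = malloc( strlen( v->value ) + strlen( value ) + 2 );
        sprintf( joined, "%s %s", v->value, value );
        free( v->value );
        v->value = joined;
    }
    else if ( flag != SET_DEFAULT )              /* plain set: replace */
    {
        free( v->value );
        v->value = strdup( value );
    }
    /* SET_DEFAULT with an existing value: keep the old one. */
    return head;
}

int main( void )
{
    struct setting * s = 0;
    s = add_setting( s, SET_REPLACE, "CFLAGS", "-O2" );
    s = add_setting( s, SET_APPEND,  "CFLAGS", "-g" );
    s = add_setting( s, SET_DEFAULT, "CFLAGS", "-O0" );   /* ignored */
    printf( "CFLAGS = %s\n", s->value );                  /* -O2 -g */
    return 0;
}
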
+
+
+/*
+ * pushsettings() - set all target specific variables.
+ */
+
+void pushsettings( SETTINGS * v )
+{
+ for ( ; v; v = v->next )
+ v->value = var_swap( v->symbol, v->value );
+}
+
+
+/*
+ * popsettings() - reset target specific variables to their pre-push values.
+ */
+
+void popsettings( SETTINGS * v )
+{
+ pushsettings( v ); /* just swap again */
+}
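
pushsettings() swaps each stored value with the variable's current one, which is why popsettings() can restore everything by simply swapping again. The same trick in miniature, with one global integer standing in for jam's variable table (all names here are illustrative):

#include <stdio.h>

static int current = 1;                 /* stands in for a global variable table */

/* Swap the saved value with the current one; calling twice restores both. */
static void push_setting( int * saved )
{
    int tmp = current;
    current = *saved;
    *saved  = tmp;
}

static void pop_setting( int * saved )
{
    push_setting( saved );              /* "just swap again", as in popsettings() */
}

int main( void )
{
    int target_specific = 42;

    push_setting( &target_specific );   /* current: 1 -> 42, saved: 42 -> 1 */
    printf( "inside:  %d\n", current );            /* 42 */

    pop_setting( &target_specific );    /* swap back */
    printf( "outside: %d\n", current );            /* 1  */
    printf( "saved:   %d\n", target_specific );    /* 42 again */
    return 0;
}
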
+
+
+/*
+ * copysettings() - duplicate a settings list, returning the new copy.
+ */
+
+SETTINGS * copysettings( SETTINGS * head )
+{
+ SETTINGS * copy = 0;
+ SETTINGS * v;
+ for ( v = head; v; v = v->next )
+ copy = addsettings( copy, VAR_SET, v->symbol, list_copy( 0, v->value ) );
+ return copy;
+}
+
+
+/*
+ * freetargets() - delete a targets list.
+ */
+
+void freetargets( TARGETS * chain )
+{
+ while ( chain )
+ {
+ TARGETS * n = chain->next;
+ BJAM_FREE( chain );
+ chain = n;
+ }
+}
+
+
+/*
+ * freeactions() - delete an action list.
+ */
+
+void freeactions( ACTIONS * chain )
+{
+ while ( chain )
+ {
+ ACTIONS * n = chain->next;
+ BJAM_FREE( chain );
+ chain = n;
+ }
+}
+
+
+/*
+ * freesettings() - delete a settings list.
+ */
+
+void freesettings( SETTINGS * v )
+{
+ while ( v )
+ {
+ SETTINGS * n = v->next;
+ freestr( v->symbol );
+ list_free( v->value );
+ v->next = settings_freelist;
+ settings_freelist = v;
+ v = n;
+ }
+}
+
+
+static void freetarget( void * xt, void * data )
+{
+ TARGET * t = (TARGET *)xt;
+ if ( t->settings ) freesettings( t->settings );
+ if ( t->depends ) freetargets ( t->depends );
+ if ( t->includes ) freetarget ( t->includes, (void *)0 );
+ if ( t->actions ) freeactions ( t->actions );
+}
+
+
+/*
+ * rules_done() - free RULE and TARGET tables.
+ */
+
+void rules_done()
+{
+ hashenumerate( targethash, freetarget, 0 );
+ hashdone( targethash );
+ while ( settings_freelist )
+ {
+ SETTINGS * n = settings_freelist->next;
+ BJAM_FREE( settings_freelist );
+ settings_freelist = n;
+ }
+}
+
+
+/*
+ * args_new() - make a new reference-counted argument list.
+ */
+
+argument_list * args_new()
+{
+ argument_list * r = (argument_list *)BJAM_MALLOC( sizeof(argument_list) );
+ r->reference_count = 0;
+ lol_init( r->data );
+ return r;
+}
+
+
+/*
+ * args_refer() - add a new reference to the given argument list.
+ */
+
+void args_refer( argument_list * a )
+{
+ ++a->reference_count;
+}
+
+
+/*
+ * args_free() - release a reference to the given argument list.
+ */
+
+void args_free( argument_list * a )
+{
+ if ( --a->reference_count <= 0 )
+ {
+ lol_free( a->data );
+ BJAM_FREE( a );
+ }
+}
+
+
+/*
+ * actions_refer() - add a new reference to the given actions.
+ */
+
+void actions_refer( rule_actions * a )
+{
+ ++a->reference_count;
+}
+
+
+/*
+ * actions_free() - release a reference to the given actions.
+ */
+
+void actions_free( rule_actions * a )
+{
+ if ( --a->reference_count <= 0 )
+ {
+ freestr( a->command );
+ list_free( a->bindlist );
+ BJAM_FREE( a );
+ }
+}
+
+
+/*
+ * set_rule_body() - set the argument list and procedure of the given rule.
+ */
+
+static void set_rule_body( RULE * rule, argument_list * args, PARSE * procedure )
+{
+ if ( args )
+ args_refer( args );
+ if ( rule->arguments )
+ args_free( rule->arguments );
+ rule->arguments = args;
+
+ if ( procedure )
+ parse_refer( procedure );
+ if ( rule->procedure )
+ parse_free( rule->procedure );
+ rule->procedure = procedure;
+}
+
+
+/*
+ * global_name() - given a rule, return the name for a corresponding rule in the
+ * global module.
+ */
+
+static char * global_rule_name( RULE * r )
+{
+ if ( r->module == root_module() )
+ return r->name;
+
+ {
+ char name[4096] = "";
+ strncat( name, r->module->name, sizeof( name ) - 1 );
+ strncat( name, r->name, sizeof( name ) - 1 );
+ return newstr( name);
+ }
+}
+
+
+/*
+ * global_rule() - given a rule, produce the corresponding entry in the global
+ * module.
+ */
+
+static RULE * global_rule( RULE * r )
+{
+ if ( r->module == root_module() )
+ return r;
+
+ {
+ char * name = global_rule_name( r );
+ RULE * result = define_rule( r->module, name, root_module() );
+ freestr( name );
+ return result;
+ }
+}
+
+
+/*
+ * new_rule_body() - make a new rule named rulename in the given module, with
+ * the given argument list and procedure. If exported is true, the rule is
+ * exported to the global module as modulename.rulename.
+ */
+
+RULE * new_rule_body( module_t * m, char * rulename, argument_list * args, PARSE * procedure, int exported )
+{
+ RULE * local = define_rule( m, rulename, m );
+ local->exported = exported;
+ set_rule_body( local, args, procedure );
+
+ /* Mark the procedure with the global rule name, regardless of whether the
+ * rule is exported. That gives us something reasonably identifiable that we
+ * can use, e.g. in profiling output. Only do this once, since this could be
+ * called multiple times with the same procedure.
+ */
+ if ( procedure->rulename == 0 )
+ procedure->rulename = global_rule_name( local );
+
+ return local;
+}
+
+
+static void set_rule_actions( RULE * rule, rule_actions * actions )
+{
+ if ( actions )
+ actions_refer( actions );
+ if ( rule->actions )
+ actions_free( rule->actions );
+ rule->actions = actions;
+}
+
+
+static rule_actions * actions_new( char * command, LIST * bindlist, int flags )
+{
+ rule_actions * result = (rule_actions *)BJAM_MALLOC( sizeof( rule_actions ) );
+ result->command = copystr( command );
+ result->bindlist = bindlist;
+ result->flags = flags;
+ result->reference_count = 0;
+ return result;
+}
+
+
+RULE * new_rule_actions( module_t * m, char * rulename, char * command, LIST * bindlist, int flags )
+{
+ RULE * local = define_rule( m, rulename, m );
+ RULE * global = global_rule( local );
+ set_rule_actions( local, actions_new( command, bindlist, flags ) );
+ set_rule_actions( global, local->actions );
+ return local;
+}
+
+
+/*
+ * Looks for a rule in the specified module, and returns it, if found. First
+ * checks if the rule is present in the module's rule table. Second, if name of
+ * the rule is in the form name1.name2 and name1 is in the list of imported
+ * modules, look in module 'name1' for rule 'name2'.
+ */
+
+RULE * lookup_rule( char * rulename, module_t * m, int local_only )
+{
+ RULE rule;
+ RULE * r = &rule;
+ RULE * result = 0;
+ module_t * original_module = m;
+
+ r->name = rulename;
+
+ if ( m->class_module )
+ m = m->class_module;
+
+ if ( m->rules && hashcheck( m->rules, (HASHDATA * *)&r ) )
+ result = r;
+ else if ( !local_only && m->imported_modules )
+ {
+ /* Try splitting the name into module and rule. */
+ char *p = strchr( r->name, '.' ) ;
+ if ( p )
+ {
+ *p = '\0';
+ /* Now, r->name keeps the module name, and p+1 keeps the rule name.
+ */
+ if ( hashcheck( m->imported_modules, (HASHDATA * *)&r ) )
+ result = lookup_rule( p + 1, bindmodule( rulename ), 1 );
+ *p = '.';
+ }
+ }
+
+ if ( result )
+ {
+ if ( local_only && !result->exported )
+ result = 0;
+ else
+ {
+ /* Lookup started in class module. We have found a rule in class
+ * module, which is marked for execution in that module, or in some
+ * instances. Mark it for execution in the instance where we started
+ * the lookup.
+ */
+ int execute_in_class = ( result->module == m );
+ int execute_in_some_instance = ( result->module->class_module &&
+ ( result->module->class_module == m ) );
+ if ( ( original_module != m ) &&
+ ( execute_in_class || execute_in_some_instance ) )
+ result->module = original_module;
+ }
+ }
+
+ return result;
+}
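
lookup_rule() above splits a dotted "module.rule" name in place by overwriting the '.' with '\0', doing the nested lookup, and then writing the '.' back so the caller's string is left intact. A standalone sketch of that temporary-split trick, with the actual module lookup replaced by a trivial comparison:

#include <stdio.h>
#include <string.h>

/* Pretend lookup: were we asked for rule "flags" in module "feature"? */
static int fake_lookup( const char * module, const char * rule )
{
    return !strcmp( module, "feature" ) && !strcmp( rule, "flags" );
}

static int lookup_dotted( char * name )
{
    char * p = strchr( name, '.' );
    int found = 0;

    if ( p )
    {
        *p = '\0';                        /* name now holds the module part */
        found = fake_lookup( name, p + 1 );
        *p = '.';                         /* restore the caller's string */
    }
    return found;
}

int main( void )
{
    char name[] = "feature.flags";        /* must be writable, not a string literal */
    printf( "found: %d\n", lookup_dotted( name ) );
    printf( "name still intact: %s\n", name );
    return 0;
}
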
+
+
+RULE * bindrule( char * rulename, module_t * m )
+{
+ RULE * result = lookup_rule( rulename, m, 0 );
+ if ( !result )
+ result = lookup_rule( rulename, root_module(), 0 );
+ /* We have only one caller, 'evaluate_rule', which will complain about
+ * calling an undefined rule. We could issue the error here, but we do not
+ * have the necessary information, such as frame.
+ */
+ if ( !result )
+ result = enter_rule( rulename, m );
+ return result;
+}
+
+
+RULE * import_rule( RULE * source, module_t * m, char * name )
+{
+ RULE * dest = define_rule( source->module, name, m );
+ set_rule_body( dest, source->arguments, source->procedure );
+ set_rule_actions( dest, source->actions );
+ return dest;
+}
diff --git a/jam-files/engine/rules.h b/jam-files/engine/rules.h
new file mode 100644
index 000000000..806a1469c
--- /dev/null
+++ b/jam-files/engine/rules.h
@@ -0,0 +1,280 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef RULES_DWA_20011020_H
+#define RULES_DWA_20011020_H
+
+#include "modules.h"
+#include "jam.h"
+#include "parse.h"
+
+
+/*
+ * rules.h - targets, rules, and related information
+ *
+ * This file describes the structures holding the targets, rules, and
+ * related information accumulated by interpreting the statements
+ * of the jam files.
+ *
+ * The following are defined:
+ *
+ * RULE - a generic jam rule, the product of RULE and ACTIONS.
+ * ACTIONS - a chain of ACTIONs.
+ * ACTION - a RULE instance with targets and sources.
+ * SETTINGS - variables to set when executing a TARGET's ACTIONS.
+ * TARGETS - a chain of TARGETs.
+ * TARGET - an entity (e.g. a file) that can be built.
+ *
+ * 04/11/94 (seiwald) - Combined deps & headers into deps[2] in TARGET.
+ * 04/12/94 (seiwald) - actionlist() now just appends a single action.
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources
+ * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
+ * 01/19/95 (seiwald) - split DONTKNOW into CANTFIND/CANTMAKE.
+ * 02/02/95 (seiwald) - new LEAVES modifier on targets.
+ * 02/14/95 (seiwald) - new NOUPDATE modifier on targets.
+ */
+
+typedef struct _rule RULE;
+typedef struct _target TARGET;
+typedef struct _targets TARGETS;
+typedef struct _action ACTION;
+typedef struct _actions ACTIONS;
+typedef struct _settings SETTINGS ;
+
+/* RULE - a generic jam rule, the product of RULE and ACTIONS. */
+
+/* A rule's argument list. */
+struct argument_list
+{
+ int reference_count;
+ LOL data[1];
+};
+
+/* Build actions corresponding to a rule. */
+struct rule_actions
+{
+ int reference_count;
+ char * command; /* command string from ACTIONS */
+ LIST * bindlist;
+ int flags; /* modifiers on ACTIONS */
+
+#define RULE_NEWSRCS 0x01 /* $(>) is updated sources only */
+#define RULE_TOGETHER 0x02 /* combine actions on single target */
+#define RULE_IGNORE 0x04 /* ignore return status of executes */
+#define RULE_QUIETLY 0x08 /* do not mention it unless verbose */
+#define RULE_PIECEMEAL 0x10 /* split exec so each $(>) is small */
+#define RULE_EXISTING 0x20 /* $(>) is pre-existing sources only */
+};
+
+typedef struct rule_actions rule_actions;
+typedef struct argument_list argument_list;
+
+struct _rule
+{
+ char * name;
+ PARSE * procedure; /* parse tree from RULE */
+ argument_list * arguments; /* argument checking info, or NULL for unchecked
+ */
+ rule_actions * actions; /* build actions, or NULL for no actions */
+ module_t * module; /* module in which this rule is executed */
+ int exported; /* nonzero if this rule is supposed to appear in
+ * the global module and be automatically
+ * imported into other modules
+ */
+#ifdef HAVE_PYTHON
+ PyObject * python_function;
+#endif
+};
+
+/* ACTIONS - a chain of ACTIONs. */
+struct _actions
+{
+ ACTIONS * next;
+ ACTIONS * tail; /* valid only for head */
+ ACTION * action;
+};
+
+/* ACTION - a RULE instance with targets and sources. */
+struct _action
+{
+ RULE * rule;
+ TARGETS * targets;
+ TARGETS * sources; /* aka $(>) */
+ char running; /* has been started */
+ char status; /* see TARGET status */
+};
+
+/* SETTINGS - variables to set when executing a TARGET's ACTIONS. */
+struct _settings
+{
+ SETTINGS * next;
+ char * symbol; /* symbol name for var_set() */
+ LIST * value; /* symbol value for var_set() */
+ int multiple;
+};
+
+/* TARGETS - a chain of TARGETs. */
+struct _targets
+{
+ TARGETS * next;
+ TARGETS * tail; /* valid only for head */
+ TARGET * target;
+};
+
+/* TARGET - an entity (e.g. a file) that can be built. */
+struct _target
+{
+ char * name;
+ char * boundname; /* if search() relocates target */
+ ACTIONS * actions; /* rules to execute, if any */
+ SETTINGS * settings; /* variables to define */
+
+ short flags; /* status info */
+
+#define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */
+#define T_FLAG_NOCARE 0x0002 /* NOCARE applied */
+#define T_FLAG_NOTFILE 0x0004 /* NOTFILE applied */
+#define T_FLAG_TOUCHED 0x0008 /* ALWAYS applied or -t target */
+#define T_FLAG_LEAVES 0x0010 /* LEAVES applied */
+#define T_FLAG_NOUPDATE 0x0020 /* NOUPDATE applied */
+#define T_FLAG_VISITED 0x0040 /* CWM: Used in debugging */
+
+/* This flag has been added to support a new built-in rule named "RMBAD". It is
+ * used to force removal of outdated targets whose dependencies fail to build.
+ */
+#define T_FLAG_RMOLD 0x0080 /* RMBAD applied */
+
+/* This flag was added to support a new built-in rule named "FAIL_EXPECTED" used
+ * to indicate that the result of running a given action should be inverted,
+ * i.e. ok <=> fail. This is useful for launching certain test runs from a
+ * Jamfile.
+ */
+#define T_FLAG_FAIL_EXPECTED 0x0100 /* FAIL_EXPECTED applied */
+
+#define T_FLAG_INTERNAL 0x0200 /* internal INCLUDES node */
+
+/* Indicates that the target must be a file. This prevents matching non-files,
+ * like directories, when a target is searched.
+ */
+#define T_FLAG_ISFILE 0x0400
+
+#define T_FLAG_PRECIOUS 0x0800
+
+ char binding; /* how target relates to a real file or
+ * folder
+ */
+
+#define T_BIND_UNBOUND 0 /* a disembodied name */
+#define T_BIND_MISSING 1 /* could not find real file */
+#define T_BIND_PARENTS 2 /* using parent's timestamp */
+#define T_BIND_EXISTS 3 /* real file, timestamp valid */
+
+ TARGETS * depends; /* dependencies */
+ TARGETS * dependants; /* the inverse of dependencies */
+ TARGETS * rebuilds; /* targets that should be force-rebuilt
+ * whenever this one is
+ */
+ TARGET * includes; /* internal includes node */
+ TARGET * original_target; /* original_target->includes = this */
+ char rescanned;
+
+ time_t time; /* update time */
+ time_t leaf; /* update time of leaf sources */
+
+ char fate; /* make0()'s diagnosis */
+
+#define T_FATE_INIT 0 /* nothing done to target */
+#define T_FATE_MAKING 1 /* make0(target) on stack */
+
+#define T_FATE_STABLE 2 /* target did not need updating */
+#define T_FATE_NEWER 3 /* target newer than parent */
+
+#define T_FATE_SPOIL 4 /* >= SPOIL rebuilds parents */
+#define T_FATE_ISTMP 4 /* unneeded temp target oddly present */
+
+#define T_FATE_BUILD 5 /* >= BUILD rebuilds target */
+#define T_FATE_TOUCHED 5 /* manually touched with -t */
+#define T_FATE_REBUILD 6
+#define T_FATE_MISSING 7 /* is missing, needs updating */
+#define T_FATE_NEEDTMP 8 /* missing temp that must be rebuilt */
+#define T_FATE_OUTDATED 9 /* is out of date, needs updating */
+#define T_FATE_UPDATE 10 /* deps updated, needs updating */
+
+#define T_FATE_BROKEN 11 /* >= BROKEN ruins parents */
+#define T_FATE_CANTFIND 11 /* no rules to make missing target */
+#define T_FATE_CANTMAKE 12 /* can not find dependencies */
+
+ char progress; /* tracks make1() progress */
+
+#define T_MAKE_INIT 0 /* make1(target) not yet called */
+#define T_MAKE_ONSTACK 1 /* make1(target) on stack */
+#define T_MAKE_ACTIVE 2 /* make1(target) in make1b() */
+#define T_MAKE_RUNNING 3 /* make1(target) running commands */
+#define T_MAKE_DONE 4 /* make1(target) done */
+
+#ifdef OPT_SEMAPHORE
+ #define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */
+#endif
+
+#ifdef OPT_SEMAPHORE
+ TARGET * semaphore; /* used in serialization */
+#endif
+
+ char status; /* exec_cmd() result */
+
+ int asynccnt; /* child deps outstanding */
+ TARGETS * parents; /* used by make1() for completion */
+ char * cmds; /* type-punned command list */
+
+ char * failed;
+};
+
+
+/* Action related functions. */
+ACTIONS * actionlist ( ACTIONS *, ACTION * );
+void freeactions ( ACTIONS * );
+SETTINGS * addsettings ( SETTINGS *, int flag, char * symbol, LIST * value );
+void pushsettings ( SETTINGS * );
+void popsettings ( SETTINGS * );
+SETTINGS * copysettings ( SETTINGS * );
+void freesettings ( SETTINGS * );
+void actions_refer( rule_actions * );
+void actions_free ( rule_actions * );
+
+/* Argument list related functions. */
+void args_free ( argument_list * );
+argument_list * args_new ();
+void args_refer( argument_list * );
+
+/* Rule related functions. */
+RULE * bindrule ( char * rulename, module_t * );
+RULE * import_rule ( RULE * source, module_t *, char * name );
+RULE * new_rule_body ( module_t *, char * rulename, argument_list *, PARSE * procedure, int exprt );
+RULE * new_rule_actions( module_t *, char * rulename, char * command, LIST * bindlist, int flags );
+void rule_free ( RULE * );
+
+/* Target related functions. */
+void bind_explicitly_located_targets();
+TARGET * bindtarget ( char const * target_name );
+TARGET * copytarget ( TARGET const * t );
+void freetargets ( TARGETS * );
+TARGET * search_for_target ( char * name, LIST * search_path );
+TARGETS * targetchain ( TARGETS * chain, TARGETS * );
+TARGETS * targetentry ( TARGETS * chain, TARGET * );
+void target_include ( TARGET * including, TARGET * included );
+TARGETS * targetlist ( TARGETS * chain, LIST * target_names );
+void touch_target ( char * t );
+
+/* Final module cleanup. */
+void rules_done();
+
+#endif
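
The T_FATE_* constants above are deliberately ordered so that callers of make0() can classify a target with plain threshold comparisons (>= SPOIL rebuilds parents, >= BUILD rebuilds the target, >= BROKEN ruins parents), as the in-line comments state. The following is only a minimal sketch of that idea, not code from this patch; the tally_fate() helper, its counts array, and the exact include list are assumptions:

    #include "jam.h"
    #include "lists.h"
    #include "parse.h"
    #include "rules.h"

    /* Classify a target by its fate using the ordered thresholds above.
     * counts[0]/[1]/[2] tally "spoils parents", "needs build", "broken".
     */
    static void tally_fate( TARGET * t, int counts[ 3 ] )
    {
        if ( t->fate >= T_FATE_BROKEN )      /* >= BROKEN ruins parents       */
            ++counts[ 2 ];
        else if ( t->fate >= T_FATE_BUILD )  /* >= BUILD rebuilds this target */
            ++counts[ 1 ];
        else if ( t->fate >= T_FATE_SPOIL )  /* >= SPOIL rebuilds its parents */
            ++counts[ 0 ];
    }
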
diff --git a/jam-files/engine/scan.c b/jam-files/engine/scan.c
new file mode 100644
index 000000000..11c44c0e2
--- /dev/null
+++ b/jam-files/engine/scan.c
@@ -0,0 +1,418 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "jamgram.h"
+#include "jambase.h"
+#include "newstr.h"
+
+/*
+ * scan.c - the jam yacc scanner
+ *
+ * 12/26/93 (seiwald) - bump buf in yylex to 10240 - yuk.
+ * 09/16/94 (seiwald) - check for overflows, unmatched {}'s, etc.
+ * Also handle tokens abutting EOF by remembering
+ *                      to return EOF no matter how many times yylex()
+ * reinvokes yyline().
+ * 02/11/95 (seiwald) - honor only punctuation keywords if SCAN_PUNCT.
+ * 07/27/95 (seiwald) - Include jamgram.h after scan.h, so that YYSTYPE is
+ * defined before Linux's yacc tries to redefine it.
+ */
+
+struct keyword
+{
+ char * word;
+ int type;
+} keywords[] =
+{
+#include "jamgramtab.h"
+ { 0, 0 }
+};
+
+struct include
+{
+ struct include * next; /* next serial include file */
+ char * string; /* pointer into current line */
+ char * * strings; /* for yyfparse() -- text to parse */
+ FILE * file; /* for yyfparse() -- file being read */
+ char * fname; /* for yyfparse() -- file name */
+ int line; /* line counter for error messages */
+ char buf[ 512 ]; /* for yyfparse() -- line buffer */
+};
+
+static struct include * incp = 0; /* current file; head of chain */
+
+static int scanmode = SCAN_NORMAL;
+static int anyerrors = 0;
+
+
+static char * symdump( YYSTYPE * );
+
+#define BIGGEST_TOKEN 10240 /* no single token can be larger */
+
+
+/*
+ * Set parser mode: normal, string, or keyword.
+ */
+
+void yymode( int n )
+{
+ scanmode = n;
+}
+
+
+void yyerror( char * s )
+{
+ /* We use yylval instead of incp to access the error location information as
+ * the incp pointer will already be reset to 0 in case the error occurred at
+ * EOF.
+ *
+ * The two may differ only if we get an error while reading a lexical token
+     * spanning multiple lines, e.g. a multi-line string literal or action body,
+ * in which case yylval location information will hold the information about
+ * where this token started while incp will hold the information about where
+ * reading it broke.
+ *
+ * TODO: Test the theory about when yylval and incp location information are
+ * the same and when they differ.
+ */
+ printf( "%s:%d: %s at %s\n", yylval.file, yylval.line, s, symdump( &yylval ) );
+ ++anyerrors;
+}
+
+
+int yyanyerrors()
+{
+ return anyerrors != 0;
+}
+
+
+void yyfparse( char * s )
+{
+ struct include * i = (struct include *)BJAM_MALLOC( sizeof( *i ) );
+
+ /* Push this onto the incp chain. */
+ i->string = "";
+ i->strings = 0;
+ i->file = 0;
+ i->fname = copystr( s );
+ i->line = 0;
+ i->next = incp;
+ incp = i;
+
+ /* If the filename is "+", it means use the internal jambase. */
+ if ( !strcmp( s, "+" ) )
+ i->strings = jambase;
+}
+
+
+/*
+ * yyline() - read new line and return first character.
+ *
+ * Fabricates a continuous stream of characters across include files, returning
+ * EOF at the bitter end.
+ */
+
+int yyline()
+{
+ struct include * i = incp;
+
+ if ( !incp )
+ return EOF;
+
+ /* Once we start reading from the input stream, we reset the include
+ * insertion point so that the next include file becomes the head of the
+ * list.
+ */
+
+ /* If there is more data in this line, return it. */
+ if ( *i->string )
+ return *i->string++;
+
+ /* If we are reading from an internal string list, go to the next string. */
+ if ( i->strings )
+ {
+ if ( *i->strings )
+ {
+ ++i->line;
+ i->string = *(i->strings++);
+ return *i->string++;
+ }
+ }
+ else
+ {
+ /* If necessary, open the file. */
+ if ( !i->file )
+ {
+ FILE * f = stdin;
+ if ( strcmp( i->fname, "-" ) && !( f = fopen( i->fname, "r" ) ) )
+ perror( i->fname );
+ i->file = f;
+ }
+
+ /* If there is another line in this file, start it. */
+ if ( i->file && fgets( i->buf, sizeof( i->buf ), i->file ) )
+ {
+ ++i->line;
+ i->string = i->buf;
+ return *i->string++;
+ }
+ }
+
+ /* This include is done. Free it up and return EOF so yyparse() returns to
+ * parse_file().
+ */
+
+ incp = i->next;
+
+ /* Close file, free name. */
+ if ( i->file && ( i->file != stdin ) )
+ fclose( i->file );
+ freestr( i->fname );
+ BJAM_FREE( (char *)i );
+
+ return EOF;
+}
+
+
+/*
+ * yylex() - set yylval to current token; return its type.
+ *
+ * Macros to move things along:
+ *
+ * yychar() - return and advance character; invalid after EOF.
+ * yyprev() - back up one character; invalid before yychar().
+ *
+ * yychar() returns a continuous stream of characters, until it hits the EOF of
+ * the current include file.
+ */
+
+#define yychar() ( *incp->string ? *incp->string++ : yyline() )
+#define yyprev() ( incp->string-- )
+
+int yylex()
+{
+ int c;
+ char buf[ BIGGEST_TOKEN ];
+ char * b = buf;
+
+ if ( !incp )
+ goto eof;
+
+ /* Get first character (whitespace or of token). */
+ c = yychar();
+
+ if ( scanmode == SCAN_STRING )
+ {
+ /* If scanning for a string (action's {}'s), look for the closing brace.
+         * Nested { } pairs inside the action are tracked, so matched braces are kept.
+ */
+
+ int nest = 1;
+
+ while ( ( c != EOF ) && ( b < buf + sizeof( buf ) ) )
+ {
+ if ( c == '{' )
+ ++nest;
+
+ if ( ( c == '}' ) && !--nest )
+ break;
+
+ *b++ = c;
+
+ c = yychar();
+
+ /* Turn trailing "\r\n" sequences into plain "\n" for Cygwin. */
+ if ( ( c == '\n' ) && ( b[ -1 ] == '\r' ) )
+ --b;
+ }
+
+ /* We ate the ending brace -- regurgitate it. */
+ if ( c != EOF )
+ yyprev();
+
+ /* Check for obvious errors. */
+ if ( b == buf + sizeof( buf ) )
+ {
+ yyerror( "action block too big" );
+ goto eof;
+ }
+
+ if ( nest )
+ {
+ yyerror( "unmatched {} in action block" );
+ goto eof;
+ }
+
+ *b = 0;
+ yylval.type = STRING;
+ yylval.string = newstr( buf );
+ yylval.file = incp->fname;
+ yylval.line = incp->line;
+ }
+ else
+ {
+ char * b = buf;
+ struct keyword * k;
+ int inquote = 0;
+ int notkeyword;
+
+ /* Eat white space. */
+ for ( ;; )
+ {
+ /* Skip past white space. */
+ while ( ( c != EOF ) && isspace( c ) )
+ c = yychar();
+
+ /* Not a comment? */
+ if ( c != '#' )
+ break;
+
+ /* Swallow up comment line. */
+ while ( ( ( c = yychar() ) != EOF ) && ( c != '\n' ) ) ;
+ }
+
+ /* c now points to the first character of a token. */
+ if ( c == EOF )
+ goto eof;
+
+ yylval.file = incp->fname;
+ yylval.line = incp->line;
+
+ /* While scanning the word, disqualify it for (expensive) keyword lookup
+ * when we can: $anything, "anything", \anything
+ */
+ notkeyword = c == '$';
+
+ /* Look for white space to delimit word. "'s get stripped but preserve
+ * white space. \ protects next character.
+ */
+ while
+ (
+ ( c != EOF ) &&
+ ( b < buf + sizeof( buf ) ) &&
+ ( inquote || !isspace( c ) )
+ )
+ {
+ if ( c == '"' )
+ {
+ /* begin or end " */
+ inquote = !inquote;
+ notkeyword = 1;
+ }
+ else if ( c != '\\' )
+ {
+ /* normal char */
+ *b++ = c;
+ }
+ else if ( ( c = yychar() ) != EOF )
+ {
+ /* \c */
+ if (c == 'n')
+ c = '\n';
+ else if (c == 'r')
+ c = '\r';
+ else if (c == 't')
+ c = '\t';
+ *b++ = c;
+ notkeyword = 1;
+ }
+ else
+ {
+ /* \EOF */
+ break;
+ }
+
+ c = yychar();
+ }
+
+ /* Check obvious errors. */
+ if ( b == buf + sizeof( buf ) )
+ {
+ yyerror( "string too big" );
+ goto eof;
+ }
+
+ if ( inquote )
+ {
+ yyerror( "unmatched \" in string" );
+ goto eof;
+ }
+
+ /* We looked ahead a character - back up. */
+ if ( c != EOF )
+ yyprev();
+
+ /* Scan token table. Do not scan if it is obviously not a keyword or if
+         * it is alphabetic while we are looking only for punctuation keywords.
+ */
+
+ *b = 0;
+ yylval.type = ARG;
+
+ if ( !notkeyword && !( isalpha( *buf ) && ( scanmode == SCAN_PUNCT ) ) )
+ for ( k = keywords; k->word; ++k )
+ if ( ( *buf == *k->word ) && !strcmp( k->word, buf ) )
+ {
+ yylval.type = k->type;
+ yylval.string = k->word; /* used by symdump */
+ break;
+ }
+
+ if ( yylval.type == ARG )
+ yylval.string = newstr( buf );
+ }
+
+ if ( DEBUG_SCAN )
+ printf( "scan %s\n", symdump( &yylval ) );
+
+ return yylval.type;
+
+eof:
+    /* We do not reset yylval.file & yylval.line here so that unexpected EOF
+     * error messages still carry the correct location information.
+ */
+ yylval.type = EOF;
+ return yylval.type;
+}
+
+
+static char * symdump( YYSTYPE * s )
+{
+ static char buf[ BIGGEST_TOKEN + 20 ];
+ switch ( s->type )
+ {
+ case EOF : sprintf( buf, "EOF" ); break;
+ case 0 : sprintf( buf, "unknown symbol %s", s->string ); break;
+ case ARG : sprintf( buf, "argument %s" , s->string ); break;
+ case STRING: sprintf( buf, "string \"%s\"" , s->string ); break;
+ default : sprintf( buf, "keyword %s" , s->string ); break;
+ }
+ return buf;
+}
+
+
+/*
+ * Get information about the current file and line, for those epsilon
+ * transitions that produce a parse.
+ */
+
+void yyinput_stream( char * * name, int * line )
+{
+ if ( incp )
+ {
+ *name = incp->fname;
+ *line = incp->line;
+ }
+ else
+ {
+ *name = "(builtin)";
+ *line = -1;
+ }
+}
diff --git a/jam-files/engine/scan.h b/jam-files/engine/scan.h
new file mode 100644
index 000000000..3fad1c24c
--- /dev/null
+++ b/jam-files/engine/scan.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * scan.h - the jam yacc scanner
+ *
+ * External functions:
+ *
+ * yyerror( char *s ) - print a parsing error message.
+ * yyfparse( char *s ) - scan include file s.
+ * yylex() - parse the next token, returning its type.
+ * yymode() - adjust lexicon of scanner.
+ * yyparse() - declaration for yacc parser.
+ * yyanyerrors() - indicate if any parsing errors occurred.
+ *
+ * The yymode() function is for the parser to adjust the lexicon of the scanner.
+ * Aside from normal keyword scanning, there is a mode to handle action strings
+ * (look only for the closing }) and a mode to ignore most keywords when looking
+ * for a punctuation keyword. This allows non-punctuation keywords to be used in
+ * lists without quoting.
+ */
+
+/*
+ * YYSTYPE - value of a lexical token
+ */
+
+#define YYSTYPE YYSYMBOL
+
+typedef struct _YYSTYPE
+{
+ int type;
+ char * string;
+ PARSE * parse;
+ LIST * list;
+ int number;
+ char * file;
+ int line;
+} YYSTYPE;
+
+extern YYSTYPE yylval;
+
+void yymode( int n );
+void yyerror( char * s );
+int yyanyerrors();
+void yyfparse( char * s );
+int yyline();
+int yylex();
+int yyparse();
+void yyinput_stream( char * * name, int * line );
+
+# define SCAN_NORMAL 0 /* normal parsing */
+# define SCAN_STRING 1 /* look only for matching } */
+# define SCAN_PUNCT 2 /* only punctuation keywords */
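
For orientation, here is a minimal sketch of how a caller drives the scanner declared above: push a file with yyfparse(), let the yacc parser pull tokens, then check yyanyerrors(). The parse_one_file() wrapper is hypothetical and collapses the statement-by-statement loop used by the engine's real parse_file() into a single yyparse() call:

    #include "jam.h"
    #include "lists.h"
    #include "parse.h"
    #include "scan.h"

    /* Parse one Jamfile: queue it on the include chain, run the parser,
     * and report whether the scanner saw any errors.  Passing "+" instead
     * of a file name would parse the built-in Jambase.
     */
    int parse_one_file( char * fname )
    {
        yyfparse( fname );                 /* push file onto the include chain  */
        yyparse();                         /* pulls tokens via yylex()/yyline() */
        return yyanyerrors() ? -1 : 0;
    }
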
diff --git a/jam-files/engine/search.c b/jam-files/engine/search.c
new file mode 100644
index 000000000..6c23d97a1
--- /dev/null
+++ b/jam-files/engine/search.c
@@ -0,0 +1,223 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "search.h"
+#include "timestamp.h"
+#include "pathsys.h"
+#include "variable.h"
+#include "newstr.h"
+#include "compile.h"
+#include "strings.h"
+#include "hash.h"
+#include "filesys.h"
+#include <string.h>
+
+
+typedef struct _binding
+{
+ char * binding;
+ char * target;
+} BINDING;
+
+static struct hash *explicit_bindings = 0;
+
+
+void call_bind_rule
+(
+ char * target_,
+ char * boundname_
+)
+{
+ LIST * bind_rule = var_get( "BINDRULE" );
+ if ( bind_rule )
+ {
+ /* No guarantee that the target is an allocated string, so be on the
+ * safe side.
+ */
+ char * target = copystr( target_ );
+
+ /* Likewise, do not rely on implementation details of newstr.c: allocate
+ * a copy of boundname.
+ */
+ char * boundname = copystr( boundname_ );
+ if ( boundname && target )
+ {
+ /* Prepare the argument list. */
+ FRAME frame[1];
+ frame_init( frame );
+
+ /* First argument is the target name. */
+ lol_add( frame->args, list_new( L0, target ) );
+
+ lol_add( frame->args, list_new( L0, boundname ) );
+ if ( lol_get( frame->args, 1 ) )
+ evaluate_rule( bind_rule->string, frame );
+
+ /* Clean up */
+ frame_free( frame );
+ }
+ else
+ {
+ if ( boundname )
+ freestr( boundname );
+ if ( target )
+ freestr( target );
+ }
+ }
+}
+
+/*
+ * search.c - find a target along $(SEARCH) or $(LOCATE)
+ * First, check whether LOCATE is set. If so, use it to determine the
+ * location of the target and return it, regardless of whether anything
+ * actually exists at that location.
+ *
+ * Second, examine every directory in SEARCH. If a file already exists
+ * there, or another target with the same name was placed at that location
+ * via a LOCATE setting, stop and return the location. In the latter case,
+ * return the other target's name via the third argument.
+ *
+ * This behaviour allows handling dependencies on generated files. If the
+ * caller does not expect the target to be generated, 0 can be passed as
+ * the third argument.
+ */
+
+char *
+search(
+ char *target,
+ time_t *time,
+ char **another_target,
+ int file
+)
+{
+ PATHNAME f[1];
+ LIST *varlist;
+ string buf[1];
+ int found = 0;
+ /* Will be set to 1 if target location is specified via LOCATE. */
+ int explicitly_located = 0;
+ char *boundname = 0;
+
+ if ( another_target )
+ *another_target = 0;
+
+ if (! explicit_bindings )
+ explicit_bindings = hashinit( sizeof(BINDING),
+ "explicitly specified locations");
+
+ string_new( buf );
+ /* Parse the filename */
+
+ path_parse( target, f );
+
+ f->f_grist.ptr = 0;
+ f->f_grist.len = 0;
+
+ if ( ( varlist = var_get( "LOCATE" ) ) )
+ {
+ f->f_root.ptr = varlist->string;
+ f->f_root.len = strlen( varlist->string );
+
+ path_build( f, buf, 1 );
+
+ if ( DEBUG_SEARCH )
+ printf( "locate %s: %s\n", target, buf->value );
+
+ explicitly_located = 1;
+
+ timestamp( buf->value, time );
+ found = 1;
+ }
+ else if ( ( varlist = var_get( "SEARCH" ) ) )
+ {
+ while ( varlist )
+ {
+ BINDING b, *ba = &b;
+ file_info_t *ff;
+
+ f->f_root.ptr = varlist->string;
+ f->f_root.len = strlen( varlist->string );
+
+ string_truncate( buf, 0 );
+ path_build( f, buf, 1 );
+
+ if ( DEBUG_SEARCH )
+ printf( "search %s: %s\n", target, buf->value );
+
+ ff = file_query(buf->value);
+ timestamp( buf->value, time );
+
+ b.binding = buf->value;
+
+ if ( hashcheck( explicit_bindings, (HASHDATA**)&ba ) )
+ {
+ if ( DEBUG_SEARCH )
+ printf(" search %s: found explicitly located target %s\n",
+ target, ba->target);
+ if ( another_target )
+ *another_target = ba->target;
+ found = 1;
+ break;
+ }
+ else if ( ff && ff->time )
+ {
+ if ( !file || ff->is_file )
+ {
+ found = 1;
+ break;
+ }
+ }
+
+ varlist = list_next( varlist );
+ }
+ }
+
+ if ( !found )
+ {
+ /* Look for the obvious */
+ /* This is a questionable move. Should we look in the */
+ /* obvious place if SEARCH is set? */
+
+ f->f_root.ptr = 0;
+ f->f_root.len = 0;
+
+ string_truncate( buf, 0 );
+ path_build( f, buf, 1 );
+
+ if ( DEBUG_SEARCH )
+ printf( "search %s: %s\n", target, buf->value );
+
+ timestamp( buf->value, time );
+ }
+
+ boundname = newstr( buf->value );
+ string_free( buf );
+
+ if ( explicitly_located )
+ {
+ BINDING b;
+ BINDING * ba = &b;
+ b.binding = boundname;
+ b.target = target;
+        /* CONSIDER: we probably should issue a warning if another file
+ is explicitly bound to the same location. This might break
+ compatibility, though. */
+ hashenter( explicit_bindings, (HASHDATA * *)&ba );
+ }
+
+ /* prepare a call to BINDRULE if the variable is set */
+ call_bind_rule( target, boundname );
+
+ return boundname;
+}
diff --git a/jam-files/engine/search.h b/jam-files/engine/search.h
new file mode 100644
index 000000000..c364cac03
--- /dev/null
+++ b/jam-files/engine/search.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * search.h - find a target along $(SEARCH) or $(LOCATE)
+ */
+
+char *search( char *target, time_t *time, char **another_target, int file );
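
A hedged sketch of calling search() as declared above. The target name, the bind_example() wrapper, and the printed messages are invented; the point is the meaning of the three outputs (bound path, timestamp, and the optional "other target" used for generated files):

    #include "jam.h"
    #include "search.h"
    #include <stdio.h>

    /* Bind a target name to a location on disk.  If SEARCH turns up an
     * explicitly LOCATEd target of the same name rather than a real file,
     * 'other' names that target (the generated-file case).
     */
    void bind_example( void )
    {
        time_t t = 0;
        char * other = 0;
        char * bound = search( "hello.o", &t, &other, 1 /* must be a file */ );

        if ( other )
            printf( "hello.o is generated as target %s at %s\n", other, bound );
        else if ( t )
            printf( "found %s, last modified %ld\n", bound, (long)t );
        else
            printf( "%s does not exist yet\n", bound );
    }
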
diff --git a/jam-files/engine/strings.c b/jam-files/engine/strings.c
new file mode 100644
index 000000000..895612377
--- /dev/null
+++ b/jam-files/engine/strings.c
@@ -0,0 +1,201 @@
+/* Copyright David Abrahams 2004. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "jam.h"
+#include "strings.h"
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdio.h>
+
+
+#ifndef NDEBUG
+# define JAM_STRING_MAGIC ((char)0xcf)
+# define JAM_STRING_MAGIC_SIZE 4
+static void assert_invariants( string* self )
+{
+ int i;
+
+ if ( self->value == 0 )
+ {
+ assert( self->size == 0 );
+ assert( self->capacity == 0 );
+ assert( self->opt[0] == 0 );
+ return;
+ }
+
+ assert( self->size < self->capacity );
+ assert( ( self->capacity <= sizeof(self->opt) ) == ( self->value == self->opt ) );
+ assert( strlen( self->value ) == self->size );
+
+ for (i = 0; i < 4; ++i)
+ {
+ assert( self->magic[i] == JAM_STRING_MAGIC );
+ assert( self->value[self->capacity + i] == JAM_STRING_MAGIC );
+ }
+}
+#else
+# define JAM_STRING_MAGIC_SIZE 0
+# define assert_invariants(x) do {} while (0)
+#endif
+
+void string_new( string* s )
+{
+ s->value = s->opt;
+ s->size = 0;
+ s->capacity = sizeof(s->opt);
+ s->opt[0] = 0;
+#ifndef NDEBUG
+ memset(s->magic, JAM_STRING_MAGIC, sizeof(s->magic));
+#endif
+ assert_invariants( s );
+}
+
+void string_free( string* s )
+{
+ assert_invariants( s );
+ if ( s->value != s->opt )
+ BJAM_FREE( s->value );
+ string_new( s );
+}
+
+static void string_reserve_internal( string* self, size_t capacity )
+{
+ if ( self->value == self->opt )
+ {
+ self->value = (char*)BJAM_MALLOC_ATOMIC( capacity + JAM_STRING_MAGIC_SIZE );
+ self->value[0] = 0;
+ strncat( self->value, self->opt, sizeof(self->opt) );
+ assert( strlen( self->value ) <= self->capacity ); /* This is a regression test */
+ }
+ else
+ {
+ self->value = (char*)BJAM_REALLOC( self->value, capacity + JAM_STRING_MAGIC_SIZE );
+ }
+#ifndef NDEBUG
+ memcpy( self->value + capacity, self->magic, JAM_STRING_MAGIC_SIZE );
+#endif
+ self->capacity = capacity;
+}
+
+void string_reserve( string* self, size_t capacity )
+{
+ assert_invariants( self );
+ if ( capacity <= self->capacity )
+ return;
+ string_reserve_internal( self, capacity );
+ assert_invariants( self );
+}
+
+static void extend_full( string* self, char const* start, char const* finish )
+{
+ size_t new_size = self->capacity + ( finish - start );
+ size_t new_capacity = self->capacity;
+ size_t old_size = self->capacity;
+ while ( new_capacity < new_size + 1)
+ new_capacity <<= 1;
+ string_reserve_internal( self, new_capacity );
+ memcpy( self->value + old_size, start, new_size - old_size );
+ self->value[new_size] = 0;
+ self->size = new_size;
+}
+
+void string_append( string* self, char const* rhs )
+{
+ char* p = self->value + self->size;
+ char* end = self->value + self->capacity;
+ assert_invariants( self );
+
+ while ( *rhs && p != end)
+ *p++ = *rhs++;
+
+ if ( p != end )
+ {
+ *p = 0;
+ self->size = p - self->value;
+ }
+ else
+ {
+ extend_full( self, rhs, rhs + strlen(rhs) );
+ }
+ assert_invariants( self );
+}
+
+void string_append_range( string* self, char const* start, char const* finish )
+{
+ char* p = self->value + self->size;
+ char* end = self->value + self->capacity;
+ assert_invariants( self );
+
+ while ( p != end && start != finish )
+ *p++ = *start++;
+
+ if ( p != end )
+ {
+ *p = 0;
+ self->size = p - self->value;
+ }
+ else
+ {
+ extend_full( self, start, finish );
+ }
+ assert_invariants( self );
+}
+
+void string_copy( string* s, char const* rhs )
+{
+ string_new( s );
+ string_append( s, rhs );
+}
+
+void string_truncate( string* self, size_t n )
+{
+ assert_invariants( self );
+ assert( n <= self->capacity );
+ self->value[self->size = n] = 0;
+ assert_invariants( self );
+}
+
+void string_pop_back( string* self )
+{
+ string_truncate( self, self->size - 1 );
+}
+
+void string_push_back( string* self, char x )
+{
+ string_append_range( self, &x, &x + 1 );
+}
+
+char string_back( string* self )
+{
+ assert_invariants( self );
+ return self->value[self->size - 1];
+}
+
+#ifndef NDEBUG
+void string_unit_test()
+{
+ string s[1];
+ int i;
+ char buffer[sizeof(s->opt) * 2 + 2];
+ int limit = sizeof(buffer) > 254 ? 254 : sizeof(buffer);
+
+ string_new(s);
+
+ for (i = 0; i < limit; ++i)
+ {
+ string_push_back( s, (char)(i + 1) );
+ };
+
+ for (i = 0; i < limit; ++i)
+ {
+ assert( i < s->size );
+ assert( s->value[i] == (char)(i + 1));
+ }
+
+ string_free(s);
+
+}
+#endif
+
diff --git a/jam-files/engine/strings.h b/jam-files/engine/strings.h
new file mode 100644
index 000000000..33c77bd7f
--- /dev/null
+++ b/jam-files/engine/strings.h
@@ -0,0 +1,34 @@
+#ifndef STRINGS_DWA20011024_H
+# define STRINGS_DWA20011024_H
+
+/* Copyright David Abrahams 2004. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+# include <stddef.h>
+
+typedef struct string
+{
+ char* value;
+ unsigned long size;
+ unsigned long capacity;
+ char opt[32];
+#ifndef NDEBUG
+ char magic[4];
+#endif
+} string;
+
+void string_new( string* );
+void string_copy( string*, char const* );
+void string_free( string* );
+void string_append( string*, char const* );
+void string_append_range( string*, char const*, char const* );
+void string_push_back( string* s, char x );
+void string_reserve( string*, size_t );
+void string_truncate( string*, size_t );
+void string_pop_back( string* );
+char string_back( string* );
+void string_unit_test();
+
+#endif
+
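
The string API above keeps short values in the 32-byte opt buffer and switches to a doubling heap allocation only when an append outgrows it, so callers never size buffers by hand. A small usage sketch, assuming it is compiled inside the engine sources; the string_demo() wrapper itself is made up:

    #include "strings.h"
    #include <stdio.h>

    /* Build up a command line in a jam 'string'.  Short contents stay in
     * the 32-byte opt buffer; appending past it triggers the doubling
     * growth in extend_full(), transparently to the caller.
     */
    void string_demo( void )
    {
        string s[ 1 ];
        string_new( s );
        string_append( s, "gcc -c " );
        string_append( s, "a/fairly/long/path/to/some/source/file.c" );
        string_push_back( s, '\n' );
        printf( "%s", s->value );
        string_free( s );
    }
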
diff --git a/jam-files/engine/subst.c b/jam-files/engine/subst.c
new file mode 100644
index 000000000..75524ecc1
--- /dev/null
+++ b/jam-files/engine/subst.c
@@ -0,0 +1,94 @@
+#include <stddef.h>
+#include "jam.h"
+#include "regexp.h"
+#include "hash.h"
+
+#include "newstr.h"
+#include "lists.h"
+#include "parse.h"
+#include "compile.h"
+#include "frames.h"
+
+struct regex_entry
+{
+ const char* pattern;
+ regexp* regex;
+};
+typedef struct regex_entry regex_entry;
+
+static struct hash* regex_hash;
+
+regexp* regex_compile( const char* pattern )
+{
+ regex_entry entry, *e = &entry;
+ entry.pattern = pattern;
+
+ if ( !regex_hash )
+ regex_hash = hashinit(sizeof(regex_entry), "regex");
+
+ if ( hashenter( regex_hash, (HASHDATA **)&e ) )
+ e->regex = regcomp( (char*)pattern );
+
+ return e->regex;
+}
+
+LIST*
+builtin_subst(
+ PARSE *parse,
+ FRAME *frame )
+{
+ LIST* result = L0;
+ LIST* arg1 = lol_get( frame->args, 0 );
+
+ if ( arg1 && list_next(arg1) && list_next(list_next(arg1)) )
+ {
+
+ const char* source = arg1->string;
+ const char* pattern = list_next(arg1)->string;
+ regexp* repat = regex_compile( pattern );
+
+ if ( regexec( repat, (char*)source) )
+ {
+ LIST* subst = list_next(arg1);
+
+ while ((subst = list_next(subst)) != L0)
+ {
+# define BUFLEN 4096
+ char buf[BUFLEN + 1];
+ const char* in = subst->string;
+ char* out = buf;
+
+ for ( in = subst->string; *in && out < buf + BUFLEN; ++in )
+ {
+ if ( *in == '\\' || *in == '$' )
+ {
+ ++in;
+ if ( *in == 0 )
+ {
+ break;
+ }
+ else if ( *in >= '0' && *in <= '9' )
+ {
+ unsigned n = *in - '0';
+ const size_t srclen = repat->endp[n] - repat->startp[n];
+ const size_t remaining = buf + BUFLEN - out;
+ const size_t len = srclen < remaining ? srclen : remaining;
+ memcpy( out, repat->startp[n], len );
+ out += len;
+ continue;
+ }
+ /* fall through and copy the next character */
+ }
+ *out++ = *in;
+ }
+ *out = 0;
+
+ result = list_new( result, newstr( buf ) );
+#undef BUFLEN
+ }
+ }
+ }
+
+ return result;
+}
+
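
To make the backreference copying in builtin_subst() concrete, here is a rough sketch that uses the bundled regcomp()/regexec() pair (the same calls regex_compile() wraps) to pull out a $1-style group by hand. The pattern, input, and subst_demo() wrapper are invented for illustration:

    #include "jam.h"
    #include "regexp.h"
    #include <stdio.h>
    #include <string.h>

    /* Pull the basename out of "src/foo.c" by hand, the same way
     * builtin_subst() copies the text between startp[n] and endp[n]
     * when it meets $1 in a replacement string.
     */
    void subst_demo( void )
    {
        regexp * re = regcomp( "([^/]*)\\.c$" );
        if ( regexec( re, "src/foo.c" ) )
        {
            char buf[ 64 ];
            size_t len = re->endp[ 1 ] - re->startp[ 1 ];
            memcpy( buf, re->startp[ 1 ], len );
            buf[ len ] = 0;
            printf( "group 1: %s\n", buf );   /* prints "foo" */
        }
    }
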
diff --git a/jam-files/engine/timestamp.c b/jam-files/engine/timestamp.c
new file mode 100644
index 000000000..8a59c8c0e
--- /dev/null
+++ b/jam-files/engine/timestamp.c
@@ -0,0 +1,226 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+# include "jam.h"
+
+# include "hash.h"
+# include "filesys.h"
+# include "pathsys.h"
+# include "timestamp.h"
+# include "newstr.h"
+# include "strings.h"
+
+/*
+ * timestamp.c - get the timestamp of a file or archive member
+ *
+ * 09/22/00 (seiwald) - downshift names on OS2, too
+ */
+
+/*
+ * BINDING - all known files
+ */
+
+typedef struct _binding BINDING;
+
+struct _binding {
+ char *name;
+ short flags;
+
+# define BIND_SCANNED 0x01 /* if directory or arch, has been scanned */
+
+ short progress;
+
+# define BIND_INIT 0 /* never seen */
+# define BIND_NOENTRY 1 /* timestamp requested but file never found */
+# define BIND_SPOTTED 2 /* file found but not timed yet */
+# define BIND_MISSING 3 /* file found but can't get timestamp */
+# define BIND_FOUND 4 /* file found and time stamped */
+
+ time_t time; /* update time - 0 if not exist */
+};
+
+static struct hash * bindhash = 0;
+static void time_enter( void *, char *, int, time_t );
+
+static char * time_progress[] =
+{
+ "INIT",
+ "NOENTRY",
+ "SPOTTED",
+ "MISSING",
+ "FOUND"
+};
+
+
+/*
+ * timestamp() - return timestamp on a file, if present.
+ */
+
+void timestamp( char * target, time_t * time )
+{
+ PROFILE_ENTER( timestamp );
+
+ PATHNAME f1;
+ PATHNAME f2;
+ BINDING binding;
+ BINDING * b = &binding;
+ string buf[ 1 ];
+#ifdef DOWNSHIFT_PATHS
+ string path;
+ char * p;
+#endif
+
+#ifdef DOWNSHIFT_PATHS
+ string_copy( &path, target );
+ p = path.value;
+
+ do
+ {
+ *p = tolower( *p );
+#ifdef NT
+ /* On NT, we must use backslashes or the file will not be found. */
+ if ( *p == '/' )
+ *p = PATH_DELIM;
+#endif
+ }
+ while ( *p++ );
+
+ target = path.value;
+#endif /* #ifdef DOWNSHIFT_PATHS */
+ string_new( buf );
+
+ if ( !bindhash )
+ bindhash = hashinit( sizeof( BINDING ), "bindings" );
+
+ /* Quick path - is it there? */
+ b->name = target;
+ b->time = b->flags = 0;
+ b->progress = BIND_INIT;
+
+ if ( hashenter( bindhash, (HASHDATA * *)&b ) )
+ b->name = newstr( target ); /* never freed */
+
+ if ( b->progress != BIND_INIT )
+ goto afterscanning;
+
+ b->progress = BIND_NOENTRY;
+
+ /* Not found - have to scan for it. */
+ path_parse( target, &f1 );
+
+ /* Scan directory if not already done so. */
+ {
+ BINDING binding;
+ BINDING * b = &binding;
+
+ f2 = f1;
+ f2.f_grist.len = 0;
+ path_parent( &f2 );
+ path_build( &f2, buf, 0 );
+
+ b->name = buf->value;
+ b->time = b->flags = 0;
+ b->progress = BIND_INIT;
+
+ if ( hashenter( bindhash, (HASHDATA * *)&b ) )
+ b->name = newstr( buf->value ); /* never freed */
+
+ if ( !( b->flags & BIND_SCANNED ) )
+ {
+ file_dirscan( buf->value, time_enter, bindhash );
+ b->flags |= BIND_SCANNED;
+ }
+ }
+
+ /* Scan archive if not already done so. */
+ if ( f1.f_member.len )
+ {
+ BINDING binding;
+ BINDING * b = &binding;
+
+ f2 = f1;
+ f2.f_grist.len = 0;
+ f2.f_member.len = 0;
+ string_truncate( buf, 0 );
+ path_build( &f2, buf, 0 );
+
+ b->name = buf->value;
+ b->time = b->flags = 0;
+ b->progress = BIND_INIT;
+
+ if ( hashenter( bindhash, (HASHDATA * *)&b ) )
+ b->name = newstr( buf->value ); /* never freed */
+
+ if ( !( b->flags & BIND_SCANNED ) )
+ {
+ file_archscan( buf->value, time_enter, bindhash );
+ b->flags |= BIND_SCANNED;
+ }
+ }
+
+ afterscanning:
+
+ if ( b->progress == BIND_SPOTTED )
+ {
+ b->progress = file_time( b->name, &b->time ) < 0
+ ? BIND_MISSING
+ : BIND_FOUND;
+ }
+
+ *time = b->progress == BIND_FOUND ? b->time : 0;
+ string_free( buf );
+#ifdef DOWNSHIFT_PATHS
+ string_free( &path );
+#endif
+
+ PROFILE_EXIT( timestamp );
+}
+
+
+static void time_enter( void * closure, char * target, int found, time_t time )
+{
+ BINDING binding;
+ BINDING * b = &binding;
+ struct hash * bindhash = (struct hash *)closure;
+
+#ifdef DOWNSHIFT_PATHS
+ char path[ MAXJPATH ];
+ char * p = path;
+
+ do *p++ = tolower( *target );
+ while ( *target++ );
+
+ target = path;
+#endif
+
+ b->name = target;
+ b->flags = 0;
+
+ if ( hashenter( bindhash, (HASHDATA * *)&b ) )
+ b->name = newstr( target ); /* never freed */
+
+ b->time = time;
+ b->progress = found ? BIND_FOUND : BIND_SPOTTED;
+
+ if ( DEBUG_BINDSCAN )
+ printf( "time ( %s ) : %s\n", target, time_progress[ b->progress ] );
+}
+
+
+/*
+ * stamps_done() - free timestamp tables.
+ */
+
+void stamps_done()
+{
+ hashdone( bindhash );
+}
diff --git a/jam-files/engine/timestamp.h b/jam-files/engine/timestamp.h
new file mode 100644
index 000000000..f57527639
--- /dev/null
+++ b/jam-files/engine/timestamp.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * timestamp.h - get the timestamp of a file or archive member
+ */
+
+void timestamp( char * target, time_t * time );
+void stamps_done();
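
A minimal sketch of querying the timestamp cache declared above; the file name and the timestamp_demo() wrapper are assumptions. A zero result means the scan never found the file (or archive member):

    #include "jam.h"
    #include "timestamp.h"
    #include <stdio.h>
    #include <time.h>

    /* Ask the cache when a file was last modified.  A zero result means
     * it was never found.
     */
    void timestamp_demo( void )
    {
        time_t t = 0;
        timestamp( "Jamroot", &t );
        if ( t )
            printf( "Jamroot last modified: %s", ctime( &t ) );
        else
            printf( "Jamroot not found\n" );
    }
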
diff --git a/jam-files/engine/variable.c b/jam-files/engine/variable.c
new file mode 100644
index 000000000..795f34584
--- /dev/null
+++ b/jam-files/engine/variable.c
@@ -0,0 +1,631 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Reece H. Dunn.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "parse.h"
+#include "variable.h"
+#include "expand.h"
+#include "hash.h"
+#include "filesys.h"
+#include "newstr.h"
+#include "strings.h"
+#include "pathsys.h"
+#include <stdlib.h>
+#include <stdio.h>
+
+/*
+ * variable.c - handle Jam multi-element variables.
+ *
+ * External routines:
+ *
+ * var_defines() - load a bunch of variable=value settings.
+ * var_string() - expand a string with variables in it.
+ * var_get() - get value of a user defined symbol.
+ * var_set() - set a variable in jam's user defined symbol table.
+ * var_swap() - swap a variable's value with the given one.
+ * var_done() - free variable tables.
+ *
+ * Internal routines:
+ *
+ * var_enter() - make new var symbol table entry, returning var ptr.
+ * var_dump() - dump a variable to stdout.
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ * 01/22/95 (seiwald) - split environment variables at blanks or :'s
+ * 05/10/95 (seiwald) - split path variables at SPLITPATH (not :)
+ * 09/11/00 (seiwald) - defunct var_list() removed
+ */
+
+static struct hash *varhash = 0;
+
+/*
+ * VARIABLE - a user defined multi-value variable
+ */
+
+typedef struct _variable VARIABLE ;
+
+struct _variable
+{
+ char * symbol;
+ LIST * value;
+};
+
+static VARIABLE * var_enter( char * symbol );
+static void var_dump( char * symbol, LIST * value, char * what );
+
+
+/*
+ * var_hash_swap() - swap all variable settings with those passed
+ *
+ * Used to implement separate settings spaces for modules
+ */
+
+void var_hash_swap( struct hash * * new_vars )
+{
+ struct hash * old = varhash;
+ varhash = *new_vars;
+ *new_vars = old;
+}
+
+
+/*
+ * var_defines() - load a bunch of variable=value settings
+ *
+ * If preprocess is false, take the value verbatim.
+ *
+ * Otherwise, if the variable value is enclosed in quotes, strip the
+ * quotes.
+ *
+ * Otherwise, if variable name ends in PATH, split value at :'s.
+ *
+ * Otherwise, split the value at blanks.
+ */
+
+void var_defines( char * const * e, int preprocess )
+{
+ string buf[1];
+
+ string_new( buf );
+
+ for ( ; *e; ++e )
+ {
+ char * val;
+
+# ifdef OS_MAC
+ /* On the mac (MPW), the var=val is actually var\0val */
+ /* Think different. */
+
+ if ( ( val = strchr( *e, '=' ) ) || ( val = *e + strlen( *e ) ) )
+# else
+ if ( ( val = strchr( *e, '=' ) ) )
+# endif
+ {
+ LIST * l = L0;
+ char * pp;
+ char * p;
+# ifdef OPT_NO_EXTERNAL_VARIABLE_SPLIT
+ char split = '\0';
+# else
+ # ifdef OS_MAC
+ char split = ',';
+ # else
+ char split = ' ';
+ # endif
+# endif
+ size_t len = strlen( val + 1 );
+
+ int quoted = ( val[1] == '"' ) && ( val[len] == '"' ) &&
+ ( len > 1 );
+
+ if ( quoted && preprocess )
+ {
+ string_append_range( buf, val + 2, val + len );
+ l = list_new( l, newstr( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ else
+ {
+ /* Split *PATH at :'s, not spaces. */
+ if ( val - 4 >= *e )
+ {
+ if ( !strncmp( val - 4, "PATH", 4 ) ||
+ !strncmp( val - 4, "Path", 4 ) ||
+ !strncmp( val - 4, "path", 4 ) )
+ split = SPLITPATH;
+ }
+
+ /* Do the split. */
+ for
+ (
+ pp = val + 1;
+ preprocess && ( ( p = strchr( pp, split ) ) != 0 );
+ pp = p + 1
+ )
+ {
+ string_append_range( buf, pp, p );
+ l = list_new( l, newstr( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+
+ l = list_new( l, newstr( pp ) );
+ }
+
+ /* Get name. */
+ string_append_range( buf, *e, val );
+ var_set( buf->value, l, VAR_SET );
+ string_truncate( buf, 0 );
+ }
+ }
+ string_free( buf );
+}
+
+
+/*
+ * var_string() - expand a string with variables in it
+ *
+ * Copies in to out; doesn't modify targets & sources.
+ */
+
+int var_string( char * in, char * out, int outsize, LOL * lol )
+{
+ char * out0 = out;
+ char * oute = out + outsize - 1;
+
+ while ( *in )
+ {
+ char * lastword;
+ int dollar = 0;
+
+ /* Copy white space. */
+ while ( isspace( *in ) )
+ {
+ if ( out >= oute )
+ return -1;
+ *out++ = *in++;
+ }
+
+ lastword = out;
+
+ /* Copy non-white space, watching for variables. */
+ while ( *in && !isspace( *in ) )
+ {
+ if ( out >= oute )
+ return -1;
+
+ if ( ( in[ 0 ] == '$' ) && ( in[ 1 ] == '(' ) )
+ {
+ ++dollar;
+ *out++ = *in++;
+ }
+ #ifdef OPT_AT_FILES
+ else if ( ( in[ 0 ] == '@' ) && ( in[ 1 ] == '(' ) )
+ {
+ int depth = 1;
+ char * ine = in + 2;
+ char * split = 0;
+
+ /* Scan the content of the response file @() section. */
+ while ( *ine && ( depth > 0 ) )
+ {
+ switch ( *ine )
+ {
+ case '(': ++depth; break;
+ case ')': --depth; break;
+ case ':':
+ if ( ( depth == 1 ) && ( ine[ 1 ] == 'E' ) && ( ine[ 2 ] == '=' ) )
+ split = ine;
+ break;
+ }
+ ++ine;
+ }
+
+ if ( !split )
+ {
+                    /* The @() reference does not match the @(foo:E=bar) format,
+                       so we leave it alone and copy it directly to the output. */
+ int l = 0;
+ if ( out + 2 >= oute ) return -1;
+ *( out++ ) = '@';
+ *( out++ ) = '(';
+ l = var_string( in + 2, out, oute - out, lol );
+ if ( l < 0 ) return -1;
+ out += l;
+ if ( out + 1 >= oute ) return -1;
+ *( out++ ) = ')';
+ }
+ else if ( depth == 0 )
+ {
+ string file_name_v;
+ int file_name_l = 0;
+ const char * file_name_s = 0;
+
+ /* Expand the temporary file name var inline. */
+ #if 0
+ string_copy( &file_name_v, "$(" );
+ string_append_range( &file_name_v, in + 2, split );
+ string_push_back( &file_name_v, ')' );
+ #else
+ string_new( &file_name_v );
+ string_append_range( &file_name_v, in + 2, split );
+ #endif
+ file_name_l = var_string( file_name_v.value, out, oute - out + 1, lol );
+ string_free( &file_name_v );
+ if ( file_name_l < 0 ) return -1;
+ file_name_s = out;
+
+ /* For stdout/stderr we will create a temp file and generate
+ * a command that outputs the content as needed.
+ */
+ if ( ( strcmp( "STDOUT", out ) == 0 ) ||
+ ( strcmp( "STDERR", out ) == 0 ) )
+ {
+ int err_redir = strcmp( "STDERR", out ) == 0;
+ out[ 0 ] = '\0';
+ file_name_s = path_tmpfile();
+ file_name_l = strlen(file_name_s);
+ #ifdef OS_NT
+ if ( ( out + 7 + file_name_l + ( err_redir ? 5 : 0 ) ) >= oute )
+ return -1;
+ sprintf( out,"type \"%s\"%s", file_name_s,
+ err_redir ? " 1>&2" : "" );
+ #else
+ if ( ( out + 6 + file_name_l + ( err_redir ? 5 : 0 ) ) >= oute )
+ return -1;
+ sprintf( out,"cat \"%s\"%s", file_name_s,
+ err_redir ? " 1>&2" : "" );
+ #endif
+ /* We also make sure that the temp files created by this
+ * get nuked eventually.
+ */
+ file_remove_atexit( file_name_s );
+ }
+
+ /* Expand the file value into the file reference. */
+ var_string_to_file( split + 3, ine - split - 4, file_name_s,
+ lol );
+
+ /* Continue on with the expansion. */
+ out += strlen( out );
+ }
+
+ /* And continue with the parsing just past the @() reference. */
+ in = ine;
+ }
+ #endif
+ else
+ {
+ *out++ = *in++;
+ }
+ }
+
+        /* Write a terminating zero to 'out' so that 'lastword' is properly zero-terminated. */
+ if ( out >= oute )
+ return -1;
+ /* Do not increment, intentionally. */
+ *out = '\0';
+
+        /* If a variable was encountered, expand it and embed the
+         * space-separated members of the resulting list in the output.
+ */
+ if ( dollar )
+ {
+ LIST * l = var_expand( L0, lastword, out, lol, 0 );
+
+ out = lastword;
+
+ while ( l )
+ {
+ int so = strlen( l->string );
+
+ if ( out + so >= oute )
+ return -1;
+
+ strcpy( out, l->string );
+ out += so;
+ l = list_next( l );
+ if ( l ) *out++ = ' ';
+ }
+
+ list_free( l );
+ }
+ }
+
+ if ( out >= oute )
+ return -1;
+
+ *out++ = '\0';
+
+ return out - out0;
+}
+
+
+void var_string_to_file( const char * in, int insize, const char * out, LOL * lol )
+{
+ char const * ine = in + insize;
+ FILE * out_file = 0;
+ int out_debug = DEBUG_EXEC ? 1 : 0;
+ if ( globs.noexec )
+ {
+ /* out_debug = 1; */
+ }
+ else if ( strcmp( out, "STDOUT" ) == 0 )
+ {
+ out_file = stdout;
+ }
+ else if ( strcmp( out, "STDERR" ) == 0 )
+ {
+ out_file = stderr;
+ }
+ else
+ {
+ /* Handle "path to file" filenames. */
+ string out_name;
+ if ( ( out[ 0 ] == '"' ) && ( out[ strlen( out ) - 1 ] == '"' ) )
+ {
+ string_copy( &out_name, out + 1 );
+ string_truncate( &out_name, out_name.size - 1 );
+ }
+ else
+ {
+ string_copy( &out_name,out );
+ }
+ out_file = fopen( out_name.value, "w" );
+ if ( !out_file )
+ {
+ printf( "failed to write output file '%s'!\n", out_name.value );
+ exit( EXITBAD );
+ }
+ string_free( &out_name );
+ }
+
+ if ( out_debug ) printf( "\nfile %s\n", out );
+
+ while ( *in && ( in < ine ) )
+ {
+ int dollar = 0;
+ const char * output_0 = in;
+ const char * output_1 = in;
+
+ /* Copy white space. */
+ while ( ( output_1 < ine ) && isspace( *output_1 ) )
+ ++output_1;
+
+ if ( output_0 < output_1 )
+ {
+ if ( out_file ) fwrite( output_0, output_1 - output_0, 1, out_file );
+ if ( out_debug ) fwrite( output_0, output_1 - output_0, 1, stdout );
+ }
+ output_0 = output_1;
+
+ /* Copy non-white space, watching for variables. */
+ while ( ( output_1 < ine ) && *output_1 && !isspace( *output_1 ) )
+ {
+ if ( ( output_1[ 0 ] == '$' ) && ( output_1[ 1 ] == '(' ) )
+ ++dollar;
+ ++output_1;
+ }
+
+        /* If a variable was encountered, expand it and embed the space-separated
+ * members of the list in the output.
+ */
+ if ( dollar )
+ {
+ LIST * l = var_expand( L0, (char *)output_0, (char *)output_1, lol, 0 );
+
+ while ( l )
+ {
+ if ( out_file ) fputs( l->string, out_file );
+ if ( out_debug ) puts( l->string );
+ l = list_next( l );
+ if ( l )
+ {
+ if ( out_file ) fputc( ' ', out_file );
+ if ( out_debug ) fputc( ' ', stdout );
+ }
+ }
+
+ list_free( l );
+ }
+ else if ( output_0 < output_1 )
+ {
+ if ( out_file )
+ {
+ const char * output_n = output_0;
+ while ( output_n < output_1 )
+ {
+ output_n += fwrite( output_n, 1, output_1-output_n, out_file );
+ }
+ }
+ if ( out_debug )
+ {
+ const char * output_n = output_0;
+ while ( output_n < output_1 )
+ {
+ output_n += fwrite( output_n, 1, output_1-output_n, stdout );
+ }
+ }
+ }
+
+ in = output_1;
+ }
+
+ if ( out_file && ( out_file != stdout ) && ( out_file != stderr ) )
+ {
+ fflush( out_file );
+ fclose( out_file );
+ }
+
+ if ( out_debug ) fputc( '\n', stdout );
+}
+
+
+/*
+ * var_get() - get value of a user defined symbol.
+ *
+ * Returns NULL if symbol unset.
+ */
+
+LIST * var_get( char * symbol )
+{
+ LIST * result = 0;
+#ifdef OPT_AT_FILES
+ /* Some "fixed" variables... */
+ if ( strcmp( "TMPDIR", symbol ) == 0 )
+ {
+ result = list_new( L0, newstr( (char *)path_tmpdir() ) );
+ }
+ else if ( strcmp( "TMPNAME", symbol ) == 0 )
+ {
+ result = list_new( L0, newstr( (char *)path_tmpnam() ) );
+ }
+ else if ( strcmp( "TMPFILE", symbol ) == 0 )
+ {
+ result = list_new( L0, newstr( (char *)path_tmpfile() ) );
+ }
+ else if ( strcmp( "STDOUT", symbol ) == 0 )
+ {
+ result = list_new( L0, newstr( "STDOUT" ) );
+ }
+ else if ( strcmp( "STDERR", symbol ) == 0 )
+ {
+ result = list_new( L0, newstr( "STDERR" ) );
+ }
+ else
+#endif
+ {
+ VARIABLE var;
+ VARIABLE * v = &var;
+
+ v->symbol = symbol;
+
+ if ( varhash && hashcheck( varhash, (HASHDATA * *)&v ) )
+ {
+ if ( DEBUG_VARGET )
+ var_dump( v->symbol, v->value, "get" );
+ result = v->value;
+ }
+ }
+ return result;
+}
+
+
+/*
+ * var_set() - set a variable in Jam's user defined symbol table.
+ *
+ * 'flag' controls the relationship between new and old values of the variable:
+ * SET replaces the old with the new; APPEND appends the new to the old; DEFAULT
+ * only uses the new if the variable was previously unset.
+ *
+ * Copies symbol. Takes ownership of value.
+ */
+
+void var_set( char * symbol, LIST * value, int flag )
+{
+ VARIABLE * v = var_enter( symbol );
+
+ if ( DEBUG_VARSET )
+ var_dump( symbol, value, "set" );
+
+ switch ( flag )
+ {
+ case VAR_SET:
+ /* Replace value */
+ list_free( v->value );
+ v->value = value;
+ break;
+
+ case VAR_APPEND:
+ /* Append value */
+ v->value = list_append( v->value, value );
+ break;
+
+ case VAR_DEFAULT:
+ /* Set only if unset */
+ if ( !v->value )
+ v->value = value;
+ else
+ list_free( value );
+ break;
+ }
+}
+
+
+/*
+ * var_swap() - swap a variable's value with the given one.
+ */
+
+LIST * var_swap( char * symbol, LIST * value )
+{
+ VARIABLE * v = var_enter( symbol );
+ LIST * oldvalue = v->value;
+ if ( DEBUG_VARSET )
+ var_dump( symbol, value, "set" );
+ v->value = value;
+ return oldvalue;
+}
+
+
+/*
+ * var_enter() - make new var symbol table entry, returning var ptr.
+ */
+
+static VARIABLE * var_enter( char * symbol )
+{
+ VARIABLE var;
+ VARIABLE * v = &var;
+
+ if ( !varhash )
+ varhash = hashinit( sizeof( VARIABLE ), "variables" );
+
+ v->symbol = symbol;
+ v->value = 0;
+
+ if ( hashenter( varhash, (HASHDATA * *)&v ) )
+ v->symbol = newstr( symbol ); /* never freed */
+
+ return v;
+}
+
+
+/*
+ * var_dump() - dump a variable to stdout.
+ */
+
+static void var_dump( char * symbol, LIST * value, char * what )
+{
+ printf( "%s %s = ", what, symbol );
+ list_print( value );
+ printf( "\n" );
+}
+
+
+/*
+ * var_done() - free variable tables.
+ */
+
+static void delete_var_( void * xvar, void * data )
+{
+ VARIABLE * v = (VARIABLE *)xvar;
+ freestr( v->symbol );
+ list_free( v-> value );
+}
+
+
+void var_done()
+{
+ hashenumerate( varhash, delete_var_, (void *)0 );
+ hashdone( varhash );
+}
diff --git a/jam-files/engine/variable.h b/jam-files/engine/variable.h
new file mode 100644
index 000000000..5c49e3ca5
--- /dev/null
+++ b/jam-files/engine/variable.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * variable.h - handle jam multi-element variables
+ */
+
+struct hash;
+
+void var_defines( char* const *e, int preprocess );
+int var_string( char *in, char *out, int outsize, LOL *lol );
+LIST * var_get( char *symbol );
+void var_set( char *symbol, LIST *value, int flag );
+LIST * var_swap( char *symbol, LIST *value );
+void var_done();
+void var_hash_swap( struct hash** );
+
+/** Expands the "in" expression directly into the "out" file.
+    The file can be a path, STDOUT, or STDERR: the output goes to a file
+    (overwriting any previous content), to the console, or to the error
+    output, respectively.
+*/
+void var_string_to_file( const char * in, int insize, const char * out, LOL * lol );
+
+/*
+ * Defines for var_set().
+ */
+
+# define VAR_SET 0 /* override previous value */
+# define VAR_APPEND 1 /* append to previous value */
+# define VAR_DEFAULT 2 /* set only if no previous value */
+
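
A short sketch of the three var_set() modes declared above, assuming the usual engine headers; the DEMO variable and the variable_demo() wrapper are invented. Note that var_set() takes ownership of the LIST it is given, so a fresh list is built for each call:

    #include "jam.h"
    #include "lists.h"
    #include "newstr.h"
    #include "variable.h"
    #include <stdio.h>

    /* Set, append to, and then try to default a variable named DEMO. */
    void variable_demo( void )
    {
        var_set( "DEMO", list_new( L0, newstr( "one" ) ), VAR_SET );      /* DEMO = one      */
        var_set( "DEMO", list_new( L0, newstr( "two" ) ), VAR_APPEND );   /* DEMO = one two  */
        var_set( "DEMO", list_new( L0, newstr( "zzz" ) ), VAR_DEFAULT );  /* no-op: DEMO set */
        list_print( var_get( "DEMO" ) );                                  /* prints: one two */
        printf( "\n" );
    }
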
diff --git a/jam-files/engine/w32_getreg.c b/jam-files/engine/w32_getreg.c
new file mode 100644
index 000000000..5a06f43e9
--- /dev/null
+++ b/jam-files/engine/w32_getreg.c
@@ -0,0 +1,207 @@
+/*
+Copyright Paul Lin 2003. Copyright 2006 Bojan Resnik.
+Distributed under the Boost Software License, Version 1.0. (See accompanying
+file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+# include "jam.h"
+
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+
+# include "lists.h"
+# include "newstr.h"
+# include "parse.h"
+# include "frames.h"
+# include "strings.h"
+
+# define WIN32_LEAN_AND_MEAN
+# include <windows.h>
+
+# define MAX_REGISTRY_DATA_LENGTH 4096
+# define MAX_REGISTRY_KEYNAME_LENGTH 256
+# define MAX_REGISTRY_VALUENAME_LENGTH 16384
+
+typedef struct
+{
+ LPCSTR name;
+ HKEY value;
+} KeyMap;
+
+static const KeyMap dlRootKeys[] = {
+ { "HKLM", HKEY_LOCAL_MACHINE },
+ { "HKCU", HKEY_CURRENT_USER },
+ { "HKCR", HKEY_CLASSES_ROOT },
+ { "HKEY_LOCAL_MACHINE", HKEY_LOCAL_MACHINE },
+ { "HKEY_CURRENT_USER", HKEY_CURRENT_USER },
+ { "HKEY_CLASSES_ROOT", HKEY_CLASSES_ROOT },
+ { 0, 0 }
+};
+
+static HKEY get_key(char const** path)
+{
+ const KeyMap *p;
+
+ for (p = dlRootKeys; p->name; ++p)
+ {
+ int n = strlen(p->name);
+ if (!strncmp(*path,p->name,n))
+ {
+ if ((*path)[n] == '\\' || (*path)[n] == 0)
+ {
+ *path += n + 1;
+ break;
+ }
+ }
+ }
+
+ return p->value;
+}
+
+LIST*
+builtin_system_registry(
+ PARSE *parse,
+ FRAME *frame )
+{
+ char const* path = lol_get(frame->args, 0)->string;
+ LIST* result = L0;
+ HKEY key = get_key(&path);
+
+ if (
+ key != 0
+ && ERROR_SUCCESS == RegOpenKeyEx(key, path, 0, KEY_QUERY_VALUE, &key)
+ )
+ {
+ DWORD type;
+ BYTE data[MAX_REGISTRY_DATA_LENGTH];
+ DWORD len = sizeof(data);
+ LIST const* const field = lol_get(frame->args, 1);
+
+ if ( ERROR_SUCCESS ==
+ RegQueryValueEx(key, field ? field->string : 0, 0, &type, data, &len) )
+ {
+ switch (type)
+ {
+
+ case REG_EXPAND_SZ:
+ {
+ long len;
+ string expanded[1];
+ string_new(expanded);
+
+ while (
+ (len = ExpandEnvironmentStrings(
+ (LPCSTR)data, expanded->value, expanded->capacity))
+ > expanded->capacity
+ )
+ string_reserve(expanded, len);
+
+ expanded->size = len - 1;
+
+ result = list_new( result, newstr(expanded->value) );
+ string_free( expanded );
+ }
+ break;
+
+ case REG_MULTI_SZ:
+ {
+ char* s;
+
+ for (s = (char*)data; *s; s += strlen(s) + 1)
+ result = list_new( result, newstr(s) );
+
+ }
+ break;
+
+ case REG_DWORD:
+ {
+ char buf[100];
+ sprintf( buf, "%u", *(PDWORD)data );
+ result = list_new( result, newstr(buf) );
+ }
+ break;
+
+ case REG_SZ:
+ result = list_new( result, newstr((char*)data) );
+ break;
+ }
+ }
+ RegCloseKey(key);
+ }
+ return result;
+}
+
+static LIST* get_subkey_names(HKEY key, char const* path)
+{
+ LIST* result = 0;
+
+ if ( ERROR_SUCCESS ==
+ RegOpenKeyEx(key, path, 0, KEY_ENUMERATE_SUB_KEYS, &key)
+ )
+ {
+ char name[MAX_REGISTRY_KEYNAME_LENGTH];
+ DWORD name_size = sizeof(name);
+ DWORD index;
+ FILETIME last_write_time;
+
+ for ( index = 0;
+ ERROR_SUCCESS == RegEnumKeyEx(
+ key, index, name, &name_size, 0, 0, 0, &last_write_time);
+ ++index,
+ name_size = sizeof(name)
+ )
+ {
+ name[name_size] = 0;
+ result = list_append(result, list_new(0, newstr(name)));
+ }
+
+ RegCloseKey(key);
+ }
+
+ return result;
+}
+
+static LIST* get_value_names(HKEY key, char const* path)
+{
+ LIST* result = 0;
+
+ if ( ERROR_SUCCESS == RegOpenKeyEx(key, path, 0, KEY_QUERY_VALUE, &key) )
+ {
+ char name[MAX_REGISTRY_VALUENAME_LENGTH];
+ DWORD name_size = sizeof(name);
+ DWORD index;
+
+ for ( index = 0;
+ ERROR_SUCCESS == RegEnumValue(
+ key, index, name, &name_size, 0, 0, 0, 0);
+ ++index,
+ name_size = sizeof(name)
+ )
+ {
+ name[name_size] = 0;
+ result = list_append(result, list_new(0, newstr(name)));
+ }
+
+ RegCloseKey(key);
+ }
+
+ return result;
+}
+
+LIST*
+builtin_system_registry_names(
+ PARSE *parse,
+ FRAME *frame )
+{
+ char const* path = lol_get(frame->args, 0)->string;
+ char const* result_type = lol_get(frame->args, 1)->string;
+
+ HKEY key = get_key(&path);
+
+ if ( !strcmp(result_type, "subkeys") )
+ return get_subkey_names(key, path);
+ if ( !strcmp(result_type, "values") )
+ return get_value_names(key, path);
+ return 0;
+}
+
+# endif
diff --git a/jam-files/engine/yyacc.c b/jam-files/engine/yyacc.c
new file mode 100644
index 000000000..b5efc96b5
--- /dev/null
+++ b/jam-files/engine/yyacc.c
@@ -0,0 +1,268 @@
+/* Copyright 2002 Rene Rivera.
+** Distributed under the Boost Software License, Version 1.0.
+** (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include <stdlib.h>
+
+/*
+# yyacc - yacc wrapper
+#
+# Allows tokens to be written as `literal` and then automatically
+# substituted with #defined tokens.
+#
+# Usage:
+# yyacc file.y filetab.h file.yy
+#
+# inputs:
+# file.yy yacc grammar with ` literals
+#
+# outputs:
+# file.y yacc grammar
+# filetab.h array of string <-> token mappings
+#
+# 3-13-93
+# Documented and p moved in sed command (for some reason,
+# s/x/y/p doesn't work).
+# 10-12-93
+# Take basename as second argument.
+# 12-31-96
+# reversed order of args to be compatible with GenFile rule
+# 11-20-2002
+# Reimplemented as a C program for portability. (Rene Rivera)
+*/
+
+void print_usage();
+char * copy_string(char * s, int l);
+char * tokenize_string(char * s);
+int cmp_literal(const void * a, const void * b);
+
+typedef struct
+{
+ char * string;
+ char * token;
+} literal;
+
+int main(int argc, char ** argv)
+{
+ int result = 0;
+ if (argc != 4)
+ {
+ print_usage();
+ result = 1;
+ }
+ else
+ {
+ FILE * token_output_f = 0;
+ FILE * grammar_output_f = 0;
+ FILE * grammar_source_f = 0;
+
+ grammar_source_f = fopen(argv[3],"r");
+ if (grammar_source_f == 0) { result = 1; }
+ if (result == 0)
+ {
+ literal literals[1024];
+ int t = 0;
+ char l[2048];
+ while (1)
+ {
+ if (fgets(l,2048,grammar_source_f) != 0)
+ {
+ char * c = l;
+ while (1)
+ {
+ char * c1 = strchr(c,'`');
+ if (c1 != 0)
+ {
+ char * c2 = strchr(c1+1,'`');
+ if (c2 != 0)
+ {
+ literals[t].string = copy_string(c1+1,c2-c1-1);
+ literals[t].token = tokenize_string(literals[t].string);
+ t += 1;
+ c = c2+1;
+ }
+ else
+ break;
+ }
+ else
+ break;
+ }
+ }
+ else
+ {
+ break;
+ }
+ }
+ literals[t].string = 0;
+ literals[t].token = 0;
+ qsort(literals,t,sizeof(literal),cmp_literal);
+ {
+ int p = 1;
+ int i = 1;
+ while (literals[i].string != 0)
+ {
+ if (strcmp(literals[p-1].string,literals[i].string) != 0)
+ {
+ literals[p] = literals[i];
+ p += 1;
+ }
+ i += 1;
+ }
+ literals[p].string = 0;
+ literals[p].token = 0;
+ t = p;
+ }
+ token_output_f = fopen(argv[2],"w");
+ if (token_output_f != 0)
+ {
+ int i = 0;
+ while (literals[i].string != 0)
+ {
+ fprintf(token_output_f," { \"%s\", %s },\n",literals[i].string,literals[i].token);
+ i += 1;
+ }
+ fclose(token_output_f);
+ }
+ else
+ result = 1;
+ if (result == 0)
+ {
+ grammar_output_f = fopen(argv[1],"w");
+ if (grammar_output_f != 0)
+ {
+ int i = 0;
+ while (literals[i].string != 0)
+ {
+ fprintf(grammar_output_f,"%%token %s\n",literals[i].token);
+ i += 1;
+ }
+ rewind(grammar_source_f);
+ while (1)
+ {
+ if (fgets(l,2048,grammar_source_f) != 0)
+ {
+ char * c = l;
+ while (1)
+ {
+ char * c1 = strchr(c,'`');
+ if (c1 != 0)
+ {
+ char * c2 = strchr(c1+1,'`');
+ if (c2 != 0)
+ {
+ literal key;
+ literal * replacement = 0;
+ key.string = copy_string(c1+1,c2-c1-1);
+ key.token = 0;
+ replacement = (literal*)bsearch(
+ &key,literals,t,sizeof(literal),cmp_literal);
+ *c1 = 0;
+ fprintf(grammar_output_f,"%s%s",c,replacement->token);
+ c = c2+1;
+ }
+ else
+ {
+ fprintf(grammar_output_f,"%s",c);
+ break;
+ }
+ }
+ else
+ {
+ fprintf(grammar_output_f,"%s",c);
+ break;
+ }
+ }
+ }
+ else
+ {
+ break;
+ }
+ }
+ fclose(grammar_output_f);
+ }
+ else
+ result = 1;
+ }
+ }
+ if (result != 0)
+ {
+ perror("yyacc");
+ }
+ }
+ return result;
+}
+
+static char * usage[] = {
+ "yyacc <grammar output.y> <token table output.h> <grammar source.yy>",
+ 0 };
+
+void print_usage()
+{
+ char ** u;
+ for (u = usage; *u != 0; ++u)
+ {
+ fputs(*u,stderr); putc('\n',stderr);
+ }
+}
+
+char * copy_string(char * s, int l)
+{
+ char * result = (char*)malloc(l+1);
+ strncpy(result,s,l);
+ result[l] = 0;
+ return result;
+}
+
+char * tokenize_string(char * s)
+{
+ char * result;
+ char * literal = s;
+ int l;
+ int c;
+
+ if (strcmp(s,":") == 0) literal = "_colon";
+ else if (strcmp(s,"!") == 0) literal = "_bang";
+ else if (strcmp(s,"!=") == 0) literal = "_bang_equals";
+ else if (strcmp(s,"&&") == 0) literal = "_amperamper";
+ else if (strcmp(s,"&") == 0) literal = "_amper";
+ else if (strcmp(s,"+") == 0) literal = "_plus";
+ else if (strcmp(s,"+=") == 0) literal = "_plus_equals";
+ else if (strcmp(s,"||") == 0) literal = "_barbar";
+ else if (strcmp(s,"|") == 0) literal = "_bar";
+ else if (strcmp(s,";") == 0) literal = "_semic";
+ else if (strcmp(s,"-") == 0) literal = "_minus";
+ else if (strcmp(s,"<") == 0) literal = "_langle";
+ else if (strcmp(s,"<=") == 0) literal = "_langle_equals";
+ else if (strcmp(s,">") == 0) literal = "_rangle";
+ else if (strcmp(s,">=") == 0) literal = "_rangle_equals";
+ else if (strcmp(s,".") == 0) literal = "_period";
+ else if (strcmp(s,"?") == 0) literal = "_question";
+ else if (strcmp(s,"?=") == 0) literal = "_question_equals";
+ else if (strcmp(s,"=") == 0) literal = "_equals";
+ else if (strcmp(s,",") == 0) literal = "_comma";
+ else if (strcmp(s,"[") == 0) literal = "_lbracket";
+ else if (strcmp(s,"]") == 0) literal = "_rbracket";
+ else if (strcmp(s,"{") == 0) literal = "_lbrace";
+ else if (strcmp(s,"}") == 0) literal = "_rbrace";
+ else if (strcmp(s,"(") == 0) literal = "_lparen";
+ else if (strcmp(s,")") == 0) literal = "_rparen";
+ l = strlen(literal)+2;
+ result = (char*)malloc(l+1);
+ for (c = 0; literal[c] != 0; ++c)
+ {
+ result[c] = toupper(literal[c]);
+ }
+ result[l-2] = '_';
+ result[l-1] = 't';
+ result[l] = 0;
+ return result;
+}
+
+int cmp_literal(const void * a, const void * b)
+{
+ return strcmp(((const literal *)a)->string,((const literal *)b)->string);
+}
diff --git a/jam-files/test.sh b/jam-files/test.sh
new file mode 100755
index 000000000..4e06d9e2b
--- /dev/null
+++ b/jam-files/test.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+g++ "$@" -x c++ - <<<'int main() {}' -o /dev/null >/dev/null 2>/dev/null
+echo -n $?
diff --git a/lm/Jamfile b/lm/Jamfile
new file mode 100644
index 000000000..19cbaec4b
--- /dev/null
+++ b/lm/Jamfile
@@ -0,0 +1,12 @@
+lib kenlm : bhiksha.cc binary_format.cc config.cc lm_exception.cc model.cc quantize.cc read_arpa.cc search_hashed.cc search_trie.cc trie.cc trie_sort.cc virtual_interface.cc vocab.cc ../util//kenutil : <include>.. : : <include>.. ;
+
+import testing ;
+
+run left_test.cc kenlm ..//boost_unit_test_framework : : test.arpa ;
+run model_test.cc kenlm ..//boost_unit_test_framework : : test.arpa test_nounk.arpa ;
+
+exe query : ngram_query.cc kenlm ;
+exe build_binary : build_binary.cc kenlm ;
+
+install legacy : build_binary query
+ : <location>$(TOP)/lm <install-type>EXE <install-dependencies>on <link>shared:<dll-path>$(TOP)/lm <link>shared:<install-type>LIB ;
diff --git a/lm/Makefile.am b/lm/Makefile.am
deleted file mode 100644
index f208f3223..000000000
--- a/lm/Makefile.am
+++ /dev/null
@@ -1,25 +0,0 @@
-lib_LTLIBRARIES = libkenlm.la
-bin_PROGRAMS = query build_binary
-
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES $(BOOST_CPPFLAGS)
-libkenlm_la_SOURCES = \
- bhiksha.cc \
- binary_format.cc \
- config.cc \
- lm_exception.cc \
- model.cc \
- search_hashed.cc \
- search_trie.cc \
- quantize.cc \
- read_arpa.cc \
- trie.cc \
- trie_sort.cc \
- virtual_interface.cc \
- vocab.cc
-
-query_SOURCES = ngram_query.cc
-query_LDADD = libkenlm.la $(top_srcdir)/util/libkenutil.la
-
-build_binary_SOURCES = build_binary.cc
-build_binary_LDADD = libkenlm.la $(top_srcdir)/util/libkenutil.la
-
diff --git a/lm/binary_format.cc b/lm/binary_format.cc
index 5aa274216..ab0166a65 100644
--- a/lm/binary_format.cc
+++ b/lm/binary_format.cc
@@ -20,15 +20,16 @@ const char kMagicBytes[] = "mmap lm http://kheafield.com/code format version 5\n
const char kMagicIncomplete[] = "mmap lm http://kheafield.com/code incomplete\n";
const long int kMagicVersion = 5;
-// Test values.
-struct Sanity {
+// Old binary files built on 32-bit machines have this header.
+// TODO: eliminate with next binary release.
+struct OldSanity {
char magic[sizeof(kMagicBytes)];
float zero_f, one_f, minus_half_f;
WordIndex one_word_index, max_word_index;
uint64_t one_uint64;
void SetToReference() {
- std::memset(this, 0, sizeof(Sanity));
+ std::memset(this, 0, sizeof(OldSanity));
std::memcpy(magic, kMagicBytes, sizeof(magic));
zero_f = 0.0; one_f = 1.0; minus_half_f = -0.5;
one_word_index = 1;
@@ -37,6 +38,25 @@ struct Sanity {
}
};
+
+// Test values aligned to 8 bytes.
+struct Sanity {
+ char magic[ALIGN8(sizeof(kMagicBytes))];
+ float zero_f, one_f, minus_half_f;
+ WordIndex one_word_index, max_word_index, padding_to_8;
+ uint64_t one_uint64;
+
+ void SetToReference() {
+ std::memset(this, 0, sizeof(Sanity));
+ std::memcpy(magic, kMagicBytes, sizeof(kMagicBytes));
+ zero_f = 0.0; one_f = 1.0; minus_half_f = -0.5;
+ one_word_index = 1;
+ max_word_index = std::numeric_limits<WordIndex>::max();
+ padding_to_8 = 0;
+ one_uint64 = 1;
+ }
+};
+
const char *kModelNames[6] = {"hashed n-grams with probing", "hashed n-grams with sorted uniform find", "trie", "trie with quantization", "trie with array-compressed pointers", "trie with quantization and array-compressed pointers"};
std::size_t TotalHeaderSize(unsigned char order) {
@@ -76,8 +96,12 @@ uint8_t *GrowForSearch(const Config &config, std::size_t vocab_pad, std::size_t
std::size_t adjusted_vocab = backing.vocab.size() + vocab_pad;
if (config.write_mmap) {
// Grow the file to accommodate the search, using zeros.
- if (-1 == ftruncate(backing.file.get(), adjusted_vocab + memory_size))
- UTIL_THROW(util::ErrnoException, "ftruncate on " << config.write_mmap << " to " << (adjusted_vocab + memory_size) << " failed");
+ try {
+ util::ResizeOrThrow(backing.file.get(), adjusted_vocab + memory_size);
+ } catch (util::ErrnoException &e) {
+ e << " for file " << config.write_mmap;
+ throw e;
+ }
// We're skipping over the header and vocab for the search space mmap. mmap likes page aligned offsets, so some arithmetic to round the offset down.
std::size_t page_size = util::SizePage();
@@ -96,7 +120,7 @@ void FinishFile(const Config &config, ModelType model_type, unsigned int search_
util::SyncOrThrow(backing.search.get(), backing.search.size());
util::SyncOrThrow(backing.vocab.get(), backing.vocab.size());
// header and vocab share the same mmap. The header is written here because we know the counts.
- Parameters params;
+ Parameters params = Parameters();
params.counts = counts;
params.fixed.order = counts.size();
params.fixed.probing_multiplier = config.probing_multiplier;
@@ -132,6 +156,10 @@ bool IsBinaryFormat(int fd) {
if ((end_ptr != begin_version) && version != kMagicVersion) {
UTIL_THROW(FormatLoadException, "Binary file has version " << version << " but this implementation expects version " << kMagicVersion << " so you'll have to use the ARPA to rebuild your binary");
}
+
+ OldSanity old_sanity = OldSanity();
+ old_sanity.SetToReference();
+ UTIL_THROW_IF(!memcmp(memory.get(), &old_sanity, sizeof(OldSanity)), FormatLoadException, "Looks like this is an old 32-bit format. The old 32-bit format has been removed so that 64-bit and 32-bit files are exchangeable.");
UTIL_THROW(FormatLoadException, "File looks like it should be loaded with mmap, but the test values don't match. Try rebuilding the binary format LM using the same code revision, compiler, and architecture");
}
return false;
@@ -172,9 +200,8 @@ uint8_t *SetupBinary(const Config &config, const Parameters &params, std::size_t
if (config.enumerate_vocab && !params.fixed.has_vocabulary)
UTIL_THROW(FormatLoadException, "The decoder requested all the vocabulary strings, but this binary file does not have them. You may need to rebuild the binary file with an updated version of build_binary.");
- if (config.enumerate_vocab) {
- util::SeekOrThrow(backing.file.get(), total_map);
- }
+ // Seek to vocabulary words
+ util::SeekOrThrow(backing.file.get(), total_map);
return reinterpret_cast<uint8_t*>(backing.search.get()) + TotalHeaderSize(params.counts.size());
}
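
The new Sanity struct pads the magic string to an 8-byte boundary and adds padding_to_8 so the header has the same layout on 32-bit and 64-bit builds; IsBinaryFormat then compares the start of the file bytewise against a zero-initialized reference instance, and a match against OldSanity identifies the old 32-bit-only format. A minimal sketch of that reference-struct comparison idea, with a simplified field set rather than kenlm's actual header:

#include <cstdint>
#include <cstring>
#include <iostream>

// Simplified stand-in for the Sanity check: a fixed-layout header compared bytewise.
struct Header {
  char magic[16];
  float zero_f, one_f;
  uint64_t one_uint64;
  void SetToReference() {
    std::memset(this, 0, sizeof(Header));      // zero padding bytes too, so memcmp is deterministic
    std::memcpy(magic, "demo format v1", 15);
    zero_f = 0.0f; one_f = 1.0f;
    one_uint64 = 1;
  }
};

bool LooksLikeBinary(const void *file_start) {
  Header reference;
  reference.SetToReference();
  return 0 == std::memcmp(file_start, &reference, sizeof(Header));
}

int main() {
  Header on_disk;
  on_disk.SetToReference();
  std::cout << (LooksLikeBinary(&on_disk) ? "match" : "mismatch") << '\n';
  return 0;
}
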
diff --git a/lm/build_binary.cc b/lm/build_binary.cc
index f313002fe..e235cc5a3 100644
--- a/lm/build_binary.cc
+++ b/lm/build_binary.cc
@@ -1,6 +1,5 @@
#include "lm/model.hh"
#include "util/file_piece.hh"
-#include "util/portability.hh"
#include <cstdlib>
#include <exception>
@@ -91,7 +90,7 @@ void ShowSizes(const char *file, const lm::ngram::Config &config) {
prefix = 'G';
divide = 1 << 30;
}
- long int length = std::max<long int>(2, lrint(ceil(log10((double) max_length / divide))));
+ long int length = std::max<long int>(2, static_cast<long int>(ceil(log10((double) max_length / divide))));
std::cout << "Memory estimate:\ntype ";
// right align bytes.
for (long int i = 0; i < length - 2; ++i) std::cout << ' ';
@@ -161,41 +160,45 @@ int main(int argc, char *argv[]) {
}
if (optind + 1 == argc) {
ShowSizes(argv[optind], config);
- } else if (optind + 2 == argc) {
+ return 0;
+ }
+ const char *model_type, *from_file;
+ if (optind + 2 == argc) {
+ model_type = "probing";
+ from_file = argv[optind];
config.write_mmap = argv[optind + 1];
- if (quantize || set_backoff_bits) ProbingQuantizationUnsupported();
- ProbingModel(argv[optind], config);
} else if (optind + 3 == argc) {
- const char *model_type = argv[optind];
- const char *from_file = argv[optind + 1];
+ model_type = argv[optind];
+ from_file = argv[optind + 1];
config.write_mmap = argv[optind + 2];
- if (!strcmp(model_type, "probing")) {
- if (quantize || set_backoff_bits) ProbingQuantizationUnsupported();
- ProbingModel(from_file, config);
- } else if (!strcmp(model_type, "trie")) {
- if (quantize) {
- if (bhiksha) {
- QuantArrayTrieModel(from_file, config);
- } else {
- QuantTrieModel(from_file, config);
- }
+ } else {
+ Usage(argv[0]);
+ }
+ if (!strcmp(model_type, "probing")) {
+ if (quantize || set_backoff_bits) ProbingQuantizationUnsupported();
+ ProbingModel(from_file, config);
+ } else if (!strcmp(model_type, "trie")) {
+ if (quantize) {
+ if (bhiksha) {
+ QuantArrayTrieModel(from_file, config);
} else {
- if (bhiksha) {
- ArrayTrieModel(from_file, config);
- } else {
- TrieModel(from_file, config);
- }
+ QuantTrieModel(from_file, config);
}
} else {
- Usage(argv[0]);
+ if (bhiksha) {
+ ArrayTrieModel(from_file, config);
+ } else {
+ TrieModel(from_file, config);
+ }
}
} else {
Usage(argv[0]);
}
- }
- catch (const std::exception &e) {
+ std::cerr << "Built " << config.write_mmap << " successfully." << std::endl;
+ } catch (const std::exception &e) {
std::cerr << e.what() << std::endl;
return 1;
}
+
return 0;
}
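
The rewritten main() above resolves model_type and from_file once for both the two- and three-argument forms and only then dispatches on the model type, instead of repeating the probing/trie branches inside each arity case. A condensed sketch of that positional-argument handling using plain argc/argv; Usage and Build are hypothetical stand-ins, not the functions in build_binary.cc:

#include <cstdlib>
#include <cstring>
#include <iostream>

void Usage(const char *name) {
  std::cerr << "Usage: " << name << " [type] input.arpa output.binary\n";
  std::exit(1);
}

void Build(const char *type, const char *from, const char *to) {
  std::cout << "building " << type << " model from " << from << " into " << to << '\n';
}

int main(int argc, char *argv[]) {
  const char *model_type = "probing", *from_file = 0, *write_mmap = 0;
  if (argc == 3) {          // input + output: default to the probing model
    from_file = argv[1];
    write_mmap = argv[2];
  } else if (argc == 4) {   // explicit type + input + output
    model_type = argv[1];
    from_file = argv[2];
    write_mmap = argv[3];
  } else {
    Usage(argv[0]);         // exits
  }
  if (!std::strcmp(model_type, "probing") || !std::strcmp(model_type, "trie")) {
    Build(model_type, from_file, write_mmap);
  } else {
    Usage(argv[0]);
  }
  return 0;
}
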
diff --git a/lm/left_test.cc b/lm/left_test.cc
index 8bb91cb37..c85e5efa8 100644
--- a/lm/left_test.cc
+++ b/lm/left_test.cc
@@ -142,7 +142,7 @@ template <class M> float TreeMiddle(const M &m, const std::vector<WordIndex> &wo
template <class M> void LookupVocab(const M &m, const StringPiece &str, std::vector<WordIndex> &out) {
out.clear();
- for (util::PieceIterator<' '> i(str); i; ++i) {
+ for (util::TokenIter<util::SingleCharacter, true> i(str, ' '); i; ++i) {
out.push_back(m.GetVocabulary().Index(*i));
}
}
@@ -326,10 +326,17 @@ template <class M> void FullGrow(const M &m) {
}
}
+const char *FileLocation() {
+ if (boost::unit_test::framework::master_test_suite().argc < 2) {
+ return "test.arpa";
+ }
+ return boost::unit_test::framework::master_test_suite().argv[1];
+}
+
template <class M> void Everything() {
Config config;
config.messages = NULL;
- M m("test.arpa", config);
+ M m(FileLocation(), config);
Short(m);
Charge(m);
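
FileLocation() lets the Jamfile's "run left_test.cc ... : : test.arpa" rule pass the ARPA file as a test argument instead of hard-coding a relative path; Boost.Test exposes such arguments through master_test_suite(). A minimal sketch of the same pattern, assuming Boost is installed (link with boost_unit_test_framework, add BOOST_TEST_DYN_LINK for the shared library, and pass the path after -- when running the binary by hand):

#define BOOST_TEST_MODULE ArpaLocationExample
#include <boost/test/unit_test.hpp>

// Fall back to a default path when the harness supplies no argument.
const char *FileLocation() {
  if (boost::unit_test::framework::master_test_suite().argc < 2)
    return "test.arpa";
  return boost::unit_test::framework::master_test_suite().argv[1];
}

BOOST_AUTO_TEST_CASE(ReportsLocation) {
  BOOST_TEST_MESSAGE("model file: " << FileLocation());
  BOOST_CHECK(FileLocation() != static_cast<const char*>(0));
}
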
diff --git a/lm/lm.xcodeproj/project.pbxproj b/lm/lm.xcodeproj/project.pbxproj
new file mode 100644
index 000000000..14ea5097f
--- /dev/null
+++ b/lm/lm.xcodeproj/project.pbxproj
@@ -0,0 +1,354 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1EE8C3981476A73C002496F2 /* bhiksha.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3651476A73C002496F2 /* bhiksha.cc */; };
+ 1EE8C3991476A73C002496F2 /* bhiksha.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3661476A73C002496F2 /* bhiksha.hh */; };
+ 1EE8C39A1476A73C002496F2 /* binary_format.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3671476A73C002496F2 /* binary_format.cc */; };
+ 1EE8C39B1476A73C002496F2 /* binary_format.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3681476A73C002496F2 /* binary_format.hh */; };
+ 1EE8C39C1476A73C002496F2 /* blank.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3691476A73C002496F2 /* blank.hh */; };
+ 1EE8C39D1476A73C002496F2 /* build_binary.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C36A1476A73C002496F2 /* build_binary.cc */; };
+ 1EE8C39E1476A73C002496F2 /* config.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C36D1476A73C002496F2 /* config.cc */; };
+ 1EE8C39F1476A73C002496F2 /* config.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C36E1476A73C002496F2 /* config.hh */; };
+ 1EE8C3A01476A73C002496F2 /* enumerate_vocab.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3711476A73C002496F2 /* enumerate_vocab.hh */; };
+ 1EE8C3A11476A73C002496F2 /* facade.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3721476A73C002496F2 /* facade.hh */; };
+ 1EE8C3A21476A73C002496F2 /* left_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3731476A73C002496F2 /* left_test.cc */; };
+ 1EE8C3A31476A73C002496F2 /* left.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3741476A73C002496F2 /* left.hh */; };
+ 1EE8C3A41476A73C002496F2 /* lm_exception.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3761476A73C002496F2 /* lm_exception.cc */; };
+ 1EE8C3A51476A73C002496F2 /* lm_exception.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3771476A73C002496F2 /* lm_exception.hh */; };
+ 1EE8C3A71476A73C002496F2 /* max_order.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C37B1476A73C002496F2 /* max_order.hh */; };
+ 1EE8C3A81476A73C002496F2 /* model_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C37C1476A73C002496F2 /* model_test.cc */; };
+ 1EE8C3A91476A73C002496F2 /* model_type.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C37D1476A73C002496F2 /* model_type.hh */; };
+ 1EE8C3AA1476A73C002496F2 /* model.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C37E1476A73C002496F2 /* model.cc */; };
+ 1EE8C3AB1476A73C002496F2 /* model.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C37F1476A73C002496F2 /* model.hh */; };
+ 1EE8C3AC1476A73C002496F2 /* ngram_query.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3801476A73C002496F2 /* ngram_query.cc */; };
+ 1EE8C3AD1476A73C002496F2 /* quantize.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3811476A73C002496F2 /* quantize.cc */; };
+ 1EE8C3AE1476A73C002496F2 /* quantize.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3821476A73C002496F2 /* quantize.hh */; };
+ 1EE8C3AF1476A73C002496F2 /* read_arpa.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3831476A73C002496F2 /* read_arpa.cc */; };
+ 1EE8C3B01476A73C002496F2 /* read_arpa.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3841476A73C002496F2 /* read_arpa.hh */; };
+ 1EE8C3B11476A73C002496F2 /* return.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3861476A73C002496F2 /* return.hh */; };
+ 1EE8C3B21476A73C002496F2 /* search_hashed.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3871476A73C002496F2 /* search_hashed.cc */; };
+ 1EE8C3B31476A73C002496F2 /* search_hashed.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3881476A73C002496F2 /* search_hashed.hh */; };
+ 1EE8C3B41476A73C002496F2 /* search_trie.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3891476A73C002496F2 /* search_trie.cc */; };
+ 1EE8C3B51476A73C002496F2 /* search_trie.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C38A1476A73C002496F2 /* search_trie.hh */; };
+ 1EE8C3B61476A73C002496F2 /* trie_sort.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C38E1476A73C002496F2 /* trie_sort.cc */; };
+ 1EE8C3B71476A73C002496F2 /* trie_sort.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C38F1476A73C002496F2 /* trie_sort.hh */; };
+ 1EE8C3B81476A73C002496F2 /* trie.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3901476A73C002496F2 /* trie.cc */; };
+ 1EE8C3B91476A73C002496F2 /* trie.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3911476A73C002496F2 /* trie.hh */; };
+ 1EE8C3BA1476A73C002496F2 /* virtual_interface.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3921476A73C002496F2 /* virtual_interface.cc */; };
+ 1EE8C3BB1476A73C002496F2 /* virtual_interface.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3931476A73C002496F2 /* virtual_interface.hh */; };
+ 1EE8C3BC1476A73C002496F2 /* vocab.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C3941476A73C002496F2 /* vocab.cc */; };
+ 1EE8C3BD1476A73C002496F2 /* vocab.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3951476A73C002496F2 /* vocab.hh */; };
+ 1EE8C3BE1476A73C002496F2 /* weights.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3961476A73C002496F2 /* weights.hh */; };
+ 1EE8C3BF1476A73C002496F2 /* word_index.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C3971476A73C002496F2 /* word_index.hh */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+ 1EE8C2E91476A48E002496F2 /* liblm.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = liblm.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ 1EE8C3651476A73C002496F2 /* bhiksha.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = bhiksha.cc; sourceTree = "<group>"; };
+ 1EE8C3661476A73C002496F2 /* bhiksha.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = bhiksha.hh; sourceTree = "<group>"; };
+ 1EE8C3671476A73C002496F2 /* binary_format.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = binary_format.cc; sourceTree = "<group>"; };
+ 1EE8C3681476A73C002496F2 /* binary_format.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = binary_format.hh; sourceTree = "<group>"; };
+ 1EE8C3691476A73C002496F2 /* blank.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = blank.hh; sourceTree = "<group>"; };
+ 1EE8C36A1476A73C002496F2 /* build_binary.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = build_binary.cc; sourceTree = "<group>"; };
+ 1EE8C36D1476A73C002496F2 /* config.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = config.cc; sourceTree = "<group>"; };
+ 1EE8C36E1476A73C002496F2 /* config.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = config.hh; sourceTree = "<group>"; };
+ 1EE8C3711476A73C002496F2 /* enumerate_vocab.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = enumerate_vocab.hh; sourceTree = "<group>"; };
+ 1EE8C3721476A73C002496F2 /* facade.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = facade.hh; sourceTree = "<group>"; };
+ 1EE8C3731476A73C002496F2 /* left_test.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = left_test.cc; sourceTree = "<group>"; };
+ 1EE8C3741476A73C002496F2 /* left.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = left.hh; sourceTree = "<group>"; };
+ 1EE8C3761476A73C002496F2 /* lm_exception.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = lm_exception.cc; sourceTree = "<group>"; };
+ 1EE8C3771476A73C002496F2 /* lm_exception.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = lm_exception.hh; sourceTree = "<group>"; };
+ 1EE8C37B1476A73C002496F2 /* max_order.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = max_order.hh; sourceTree = "<group>"; };
+ 1EE8C37C1476A73C002496F2 /* model_test.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = model_test.cc; sourceTree = "<group>"; };
+ 1EE8C37D1476A73C002496F2 /* model_type.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = model_type.hh; sourceTree = "<group>"; };
+ 1EE8C37E1476A73C002496F2 /* model.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = model.cc; sourceTree = "<group>"; };
+ 1EE8C37F1476A73C002496F2 /* model.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = model.hh; sourceTree = "<group>"; };
+ 1EE8C3801476A73C002496F2 /* ngram_query.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ngram_query.cc; sourceTree = "<group>"; };
+ 1EE8C3811476A73C002496F2 /* quantize.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = quantize.cc; sourceTree = "<group>"; };
+ 1EE8C3821476A73C002496F2 /* quantize.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = quantize.hh; sourceTree = "<group>"; };
+ 1EE8C3831476A73C002496F2 /* read_arpa.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = read_arpa.cc; sourceTree = "<group>"; };
+ 1EE8C3841476A73C002496F2 /* read_arpa.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = read_arpa.hh; sourceTree = "<group>"; };
+ 1EE8C3861476A73C002496F2 /* return.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = return.hh; sourceTree = "<group>"; };
+ 1EE8C3871476A73C002496F2 /* search_hashed.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = search_hashed.cc; sourceTree = "<group>"; };
+ 1EE8C3881476A73C002496F2 /* search_hashed.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = search_hashed.hh; sourceTree = "<group>"; };
+ 1EE8C3891476A73C002496F2 /* search_trie.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = search_trie.cc; sourceTree = "<group>"; };
+ 1EE8C38A1476A73C002496F2 /* search_trie.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = search_trie.hh; sourceTree = "<group>"; };
+ 1EE8C38E1476A73C002496F2 /* trie_sort.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = trie_sort.cc; sourceTree = "<group>"; };
+ 1EE8C38F1476A73C002496F2 /* trie_sort.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = trie_sort.hh; sourceTree = "<group>"; };
+ 1EE8C3901476A73C002496F2 /* trie.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = trie.cc; sourceTree = "<group>"; };
+ 1EE8C3911476A73C002496F2 /* trie.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = trie.hh; sourceTree = "<group>"; };
+ 1EE8C3921476A73C002496F2 /* virtual_interface.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = virtual_interface.cc; sourceTree = "<group>"; };
+ 1EE8C3931476A73C002496F2 /* virtual_interface.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = virtual_interface.hh; sourceTree = "<group>"; };
+ 1EE8C3941476A73C002496F2 /* vocab.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = vocab.cc; sourceTree = "<group>"; };
+ 1EE8C3951476A73C002496F2 /* vocab.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = vocab.hh; sourceTree = "<group>"; };
+ 1EE8C3961476A73C002496F2 /* weights.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = weights.hh; sourceTree = "<group>"; };
+ 1EE8C3971476A73C002496F2 /* word_index.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = word_index.hh; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 1EE8C2E61476A48E002496F2 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 1EE8C2DE1476A48E002496F2 = {
+ isa = PBXGroup;
+ children = (
+ 1EE8C3651476A73C002496F2 /* bhiksha.cc */,
+ 1EE8C3661476A73C002496F2 /* bhiksha.hh */,
+ 1EE8C3671476A73C002496F2 /* binary_format.cc */,
+ 1EE8C3681476A73C002496F2 /* binary_format.hh */,
+ 1EE8C3691476A73C002496F2 /* blank.hh */,
+ 1EE8C36A1476A73C002496F2 /* build_binary.cc */,
+ 1EE8C36D1476A73C002496F2 /* config.cc */,
+ 1EE8C36E1476A73C002496F2 /* config.hh */,
+ 1EE8C3711476A73C002496F2 /* enumerate_vocab.hh */,
+ 1EE8C3721476A73C002496F2 /* facade.hh */,
+ 1EE8C3731476A73C002496F2 /* left_test.cc */,
+ 1EE8C3741476A73C002496F2 /* left.hh */,
+ 1EE8C3761476A73C002496F2 /* lm_exception.cc */,
+ 1EE8C3771476A73C002496F2 /* lm_exception.hh */,
+ 1EE8C37B1476A73C002496F2 /* max_order.hh */,
+ 1EE8C37C1476A73C002496F2 /* model_test.cc */,
+ 1EE8C37D1476A73C002496F2 /* model_type.hh */,
+ 1EE8C37E1476A73C002496F2 /* model.cc */,
+ 1EE8C37F1476A73C002496F2 /* model.hh */,
+ 1EE8C3801476A73C002496F2 /* ngram_query.cc */,
+ 1EE8C3811476A73C002496F2 /* quantize.cc */,
+ 1EE8C3821476A73C002496F2 /* quantize.hh */,
+ 1EE8C3831476A73C002496F2 /* read_arpa.cc */,
+ 1EE8C3841476A73C002496F2 /* read_arpa.hh */,
+ 1EE8C3861476A73C002496F2 /* return.hh */,
+ 1EE8C3871476A73C002496F2 /* search_hashed.cc */,
+ 1EE8C3881476A73C002496F2 /* search_hashed.hh */,
+ 1EE8C3891476A73C002496F2 /* search_trie.cc */,
+ 1EE8C38A1476A73C002496F2 /* search_trie.hh */,
+ 1EE8C38E1476A73C002496F2 /* trie_sort.cc */,
+ 1EE8C38F1476A73C002496F2 /* trie_sort.hh */,
+ 1EE8C3901476A73C002496F2 /* trie.cc */,
+ 1EE8C3911476A73C002496F2 /* trie.hh */,
+ 1EE8C3921476A73C002496F2 /* virtual_interface.cc */,
+ 1EE8C3931476A73C002496F2 /* virtual_interface.hh */,
+ 1EE8C3941476A73C002496F2 /* vocab.cc */,
+ 1EE8C3951476A73C002496F2 /* vocab.hh */,
+ 1EE8C3961476A73C002496F2 /* weights.hh */,
+ 1EE8C3971476A73C002496F2 /* word_index.hh */,
+ 1EE8C2EA1476A48E002496F2 /* Products */,
+ );
+ sourceTree = "<group>";
+ };
+ 1EE8C2EA1476A48E002496F2 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 1EE8C2E91476A48E002496F2 /* liblm.a */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXHeadersBuildPhase section */
+ 1EE8C2E71476A48E002496F2 /* Headers */ = {
+ isa = PBXHeadersBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1EE8C3991476A73C002496F2 /* bhiksha.hh in Headers */,
+ 1EE8C39B1476A73C002496F2 /* binary_format.hh in Headers */,
+ 1EE8C39C1476A73C002496F2 /* blank.hh in Headers */,
+ 1EE8C39F1476A73C002496F2 /* config.hh in Headers */,
+ 1EE8C3A01476A73C002496F2 /* enumerate_vocab.hh in Headers */,
+ 1EE8C3A11476A73C002496F2 /* facade.hh in Headers */,
+ 1EE8C3A31476A73C002496F2 /* left.hh in Headers */,
+ 1EE8C3A51476A73C002496F2 /* lm_exception.hh in Headers */,
+ 1EE8C3A71476A73C002496F2 /* max_order.hh in Headers */,
+ 1EE8C3A91476A73C002496F2 /* model_type.hh in Headers */,
+ 1EE8C3AB1476A73C002496F2 /* model.hh in Headers */,
+ 1EE8C3AE1476A73C002496F2 /* quantize.hh in Headers */,
+ 1EE8C3B01476A73C002496F2 /* read_arpa.hh in Headers */,
+ 1EE8C3B11476A73C002496F2 /* return.hh in Headers */,
+ 1EE8C3B31476A73C002496F2 /* search_hashed.hh in Headers */,
+ 1EE8C3B51476A73C002496F2 /* search_trie.hh in Headers */,
+ 1EE8C3B71476A73C002496F2 /* trie_sort.hh in Headers */,
+ 1EE8C3B91476A73C002496F2 /* trie.hh in Headers */,
+ 1EE8C3BB1476A73C002496F2 /* virtual_interface.hh in Headers */,
+ 1EE8C3BD1476A73C002496F2 /* vocab.hh in Headers */,
+ 1EE8C3BE1476A73C002496F2 /* weights.hh in Headers */,
+ 1EE8C3BF1476A73C002496F2 /* word_index.hh in Headers */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXHeadersBuildPhase section */
+
+/* Begin PBXNativeTarget section */
+ 1EE8C2E81476A48E002496F2 /* lm */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 1EE8C2ED1476A48E002496F2 /* Build configuration list for PBXNativeTarget "lm" */;
+ buildPhases = (
+ 1EE8C2E51476A48E002496F2 /* Sources */,
+ 1EE8C2E61476A48E002496F2 /* Frameworks */,
+ 1EE8C2E71476A48E002496F2 /* Headers */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = lm;
+ productName = lm;
+ productReference = 1EE8C2E91476A48E002496F2 /* liblm.a */;
+ productType = "com.apple.product-type.library.static";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 1EE8C2E01476A48E002496F2 /* Project object */ = {
+ isa = PBXProject;
+ buildConfigurationList = 1EE8C2E31476A48E002496F2 /* Build configuration list for PBXProject "lm" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ );
+ mainGroup = 1EE8C2DE1476A48E002496F2;
+ productRefGroup = 1EE8C2EA1476A48E002496F2 /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 1EE8C2E81476A48E002496F2 /* lm */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 1EE8C2E51476A48E002496F2 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1EE8C3981476A73C002496F2 /* bhiksha.cc in Sources */,
+ 1EE8C39A1476A73C002496F2 /* binary_format.cc in Sources */,
+ 1EE8C39D1476A73C002496F2 /* build_binary.cc in Sources */,
+ 1EE8C39E1476A73C002496F2 /* config.cc in Sources */,
+ 1EE8C3A21476A73C002496F2 /* left_test.cc in Sources */,
+ 1EE8C3A41476A73C002496F2 /* lm_exception.cc in Sources */,
+ 1EE8C3A81476A73C002496F2 /* model_test.cc in Sources */,
+ 1EE8C3AA1476A73C002496F2 /* model.cc in Sources */,
+ 1EE8C3AC1476A73C002496F2 /* ngram_query.cc in Sources */,
+ 1EE8C3AD1476A73C002496F2 /* quantize.cc in Sources */,
+ 1EE8C3AF1476A73C002496F2 /* read_arpa.cc in Sources */,
+ 1EE8C3B21476A73C002496F2 /* search_hashed.cc in Sources */,
+ 1EE8C3B41476A73C002496F2 /* search_trie.cc in Sources */,
+ 1EE8C3B61476A73C002496F2 /* trie_sort.cc in Sources */,
+ 1EE8C3B81476A73C002496F2 /* trie.cc in Sources */,
+ 1EE8C3BA1476A73C002496F2 /* virtual_interface.cc in Sources */,
+ 1EE8C3BC1476A73C002496F2 /* vocab.cc in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin XCBuildConfiguration section */
+ 1EE8C2EB1476A48E002496F2 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+ COPY_PHASE_STRIP = NO;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_ENABLE_OBJC_EXCEPTIONS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_SYMBOLS_PRIVATE_EXTERN = NO;
+ GCC_VERSION = com.apple.compilers.llvm.clang.1_0;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ HEADER_SEARCH_PATHS = (
+ ../,
+ /opt/local/include,
+ );
+ MACOSX_DEPLOYMENT_TARGET = 10.7;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = macosx;
+ };
+ name = Debug;
+ };
+ 1EE8C2EC1476A48E002496F2 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+ COPY_PHASE_STRIP = YES;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_ENABLE_OBJC_EXCEPTIONS = YES;
+ GCC_VERSION = com.apple.compilers.llvm.clang.1_0;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ HEADER_SEARCH_PATHS = (
+ ../,
+ /opt/local/include,
+ );
+ MACOSX_DEPLOYMENT_TARGET = 10.7;
+ SDKROOT = macosx;
+ };
+ name = Release;
+ };
+ 1EE8C2EE1476A48E002496F2 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ EXECUTABLE_PREFIX = lib;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Debug;
+ };
+ 1EE8C2EF1476A48E002496F2 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ EXECUTABLE_PREFIX = lib;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 1EE8C2E31476A48E002496F2 /* Build configuration list for PBXProject "lm" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 1EE8C2EB1476A48E002496F2 /* Debug */,
+ 1EE8C2EC1476A48E002496F2 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 1EE8C2ED1476A48E002496F2 /* Build configuration list for PBXNativeTarget "lm" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 1EE8C2EE1476A48E002496F2 /* Debug */,
+ 1EE8C2EF1476A48E002496F2 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 1EE8C2E01476A48E002496F2 /* Project object */;
+}
diff --git a/lm/model_test.cc b/lm/model_test.cc
index 2654071f8..461704d43 100644
--- a/lm/model_test.cc
+++ b/lm/model_test.cc
@@ -19,6 +19,20 @@ std::ostream &operator<<(std::ostream &o, const State &state) {
namespace {
+const char *TestLocation() {
+ if (boost::unit_test::framework::master_test_suite().argc < 2) {
+ return "test.arpa";
+ }
+ return boost::unit_test::framework::master_test_suite().argv[1];
+}
+const char *TestNoUnkLocation() {
+ if (boost::unit_test::framework::master_test_suite().argc < 3) {
+ return "test_nounk.arpa";
+ }
+ return boost::unit_test::framework::master_test_suite().argv[2];
+
+}
+
#define StartTest(word, ngram, score, indep_left) \
ret = model.FullScore( \
state, \
@@ -307,7 +321,7 @@ template <class ModelT> void LoadingTest() {
{
ExpectEnumerateVocab enumerate;
config.enumerate_vocab = &enumerate;
- ModelT m("test.arpa", config);
+ ModelT m(TestLocation(), config);
enumerate.Check(m.GetVocabulary());
BOOST_CHECK_EQUAL((WordIndex)37, m.GetVocabulary().Bound());
Everything(m);
@@ -315,7 +329,7 @@ template <class ModelT> void LoadingTest() {
{
ExpectEnumerateVocab enumerate;
config.enumerate_vocab = &enumerate;
- ModelT m("test_nounk.arpa", config);
+ ModelT m(TestNoUnkLocation(), config);
enumerate.Check(m.GetVocabulary());
BOOST_CHECK_EQUAL((WordIndex)37, m.GetVocabulary().Bound());
NoUnkCheck(m);
@@ -346,7 +360,7 @@ template <class ModelT> void BinaryTest() {
config.enumerate_vocab = &enumerate;
{
- ModelT copy_model("test.arpa", config);
+ ModelT copy_model(TestLocation(), config);
enumerate.Check(copy_model.GetVocabulary());
enumerate.Clear();
Everything(copy_model);
@@ -370,14 +384,14 @@ template <class ModelT> void BinaryTest() {
config.messages = NULL;
enumerate.Clear();
{
- ModelT copy_model("test_nounk.arpa", config);
+ ModelT copy_model(TestNoUnkLocation(), config);
enumerate.Check(copy_model.GetVocabulary());
enumerate.Clear();
NoUnkCheck(copy_model);
}
config.write_mmap = NULL;
{
- ModelT binary("test_nounk.binary", config);
+ ModelT binary(TestNoUnkLocation(), config);
enumerate.Check(binary.GetVocabulary());
NoUnkCheck(binary);
}
diff --git a/lm/ngram_query.cc b/lm/ngram_query.cc
index 6e9874673..1b2cd5db3 100644
--- a/lm/ngram_query.cc
+++ b/lm/ngram_query.cc
@@ -94,34 +94,39 @@ int main(int argc, char *argv[]) {
std::cerr << "Input is wrapped in <s> and </s> unless null is passed." << std::endl;
return 1;
}
- bool sentence_context = (argc == 2);
- lm::ngram::ModelType model_type;
- if (lm::ngram::RecognizeBinary(argv[1], model_type)) {
- switch(model_type) {
- case lm::ngram::HASH_PROBING:
- Query<lm::ngram::ProbingModel>(argv[1], sentence_context);
- break;
- case lm::ngram::TRIE_SORTED:
- Query<lm::ngram::TrieModel>(argv[1], sentence_context);
- break;
- case lm::ngram::QUANT_TRIE_SORTED:
- Query<lm::ngram::QuantTrieModel>(argv[1], sentence_context);
- break;
- case lm::ngram::ARRAY_TRIE_SORTED:
- Query<lm::ngram::ArrayTrieModel>(argv[1], sentence_context);
- break;
- case lm::ngram::QUANT_ARRAY_TRIE_SORTED:
- Query<lm::ngram::QuantArrayTrieModel>(argv[1], sentence_context);
- break;
- case lm::ngram::HASH_SORTED:
- default:
- std::cerr << "Unrecognized kenlm model type " << model_type << std::endl;
- abort();
+ try {
+ bool sentence_context = (argc == 2);
+ lm::ngram::ModelType model_type;
+ if (lm::ngram::RecognizeBinary(argv[1], model_type)) {
+ switch(model_type) {
+ case lm::ngram::HASH_PROBING:
+ Query<lm::ngram::ProbingModel>(argv[1], sentence_context);
+ break;
+ case lm::ngram::TRIE_SORTED:
+ Query<lm::ngram::TrieModel>(argv[1], sentence_context);
+ break;
+ case lm::ngram::QUANT_TRIE_SORTED:
+ Query<lm::ngram::QuantTrieModel>(argv[1], sentence_context);
+ break;
+ case lm::ngram::ARRAY_TRIE_SORTED:
+ Query<lm::ngram::ArrayTrieModel>(argv[1], sentence_context);
+ break;
+ case lm::ngram::QUANT_ARRAY_TRIE_SORTED:
+ Query<lm::ngram::QuantArrayTrieModel>(argv[1], sentence_context);
+ break;
+ case lm::ngram::HASH_SORTED:
+ default:
+ std::cerr << "Unrecognized kenlm model type " << model_type << std::endl;
+ abort();
+ }
+ } else {
+ Query<lm::ngram::ProbingModel>(argv[1], sentence_context);
}
- } else {
- Query<lm::ngram::ProbingModel>(argv[1], sentence_context);
- }
- PrintUsage("Total time including destruction:\n");
+ PrintUsage("Total time including destruction:\n");
+ } catch (const std::exception &e) {
+ std::cerr << e.what() << std::endl;
+ return 1;
+ }
return 0;
}
diff --git a/lm/search_hashed.cc b/lm/search_hashed.cc
index 247832b0a..f803b632e 100644
--- a/lm/search_hashed.cc
+++ b/lm/search_hashed.cc
@@ -30,7 +30,7 @@ template <class Middle> class ActivateLowerMiddle {
// TODO: somehow get text of n-gram for this error message.
if (!modify_.UnsafeMutableFind(hash, i))
UTIL_THROW(FormatLoadException, "The context of every " << n << "-gram should appear as a " << (n-1) << "-gram");
- SetExtension(i->MutableValue().backoff);
+ SetExtension(i->value.backoff);
}
private:
@@ -65,7 +65,7 @@ template <class Middle> void FixSRI(int lower, float negative_lower_prob, unsign
blank.prob -= unigrams[vocab_ids[1]].backoff;
SetExtension(unigrams[vocab_ids[1]].backoff);
// Bigram including a unigram's backoff
- middle[0].Insert(Middle::Packing::Make(keys[0], blank));
+ middle[0].Insert(detail::ProbBackoffEntry::Make(keys[0], blank));
fix = 1;
} else {
for (unsigned int i = 3; i < fix + 2; ++i) backoff_hash = detail::CombineWordHash(backoff_hash, vocab_ids[i]);
@@ -74,11 +74,11 @@ template <class Middle> void FixSRI(int lower, float negative_lower_prob, unsign
for (; fix <= n - 3; ++fix) {
typename Middle::MutableIterator gotit;
if (middle[fix - 1].UnsafeMutableFind(backoff_hash, gotit)) {
- float &backoff = gotit->MutableValue().backoff;
+ float &backoff = gotit->value.backoff;
SetExtension(backoff);
blank.prob -= backoff;
}
- middle[fix].Insert(Middle::Packing::Make(keys[fix], blank));
+ middle[fix].Insert(detail::ProbBackoffEntry::Make(keys[fix], blank));
backoff_hash = detail::CombineWordHash(backoff_hash, vocab_ids[fix + 2]);
}
}
@@ -89,7 +89,7 @@ template <class Voc, class Store, class Middle, class Activate> void ReadNGrams(
// vocab ids of words in reverse order
std::vector<WordIndex> vocab_ids(n);
std::vector<uint64_t> keys(n-1);
- typename Store::Packing::Value value;
+ typename Store::Entry::Value value;
typename Middle::MutableIterator found;
for (size_t i = 0; i < count; ++i) {
ReadNGram(f, n, vocab, &*vocab_ids.begin(), value, warn);
@@ -100,7 +100,7 @@ template <class Voc, class Store, class Middle, class Activate> void ReadNGrams(
}
// Initially the sign bit is on, indicating it does not extend left. Most already have this but there might be +0.0.
util::SetSign(value.prob);
- store.Insert(Store::Packing::Make(keys[n-2], value));
+ store.Insert(Store::Entry::Make(keys[n-2], value));
// Go back and find the longest right-aligned entry, informing it that it extends left. Normally this will match immediately, but sometimes SRI is dumb.
int lower;
util::FloatEnc fix_prob;
@@ -113,9 +113,9 @@ template <class Voc, class Store, class Middle, class Activate> void ReadNGrams(
}
if (middle[lower].UnsafeMutableFind(keys[lower], found)) {
// Turn off sign bit to indicate that it extends left.
- fix_prob.f = found->MutableValue().prob;
+ fix_prob.f = found->value.prob;
fix_prob.i &= ~util::kSignBit;
- found->MutableValue().prob = fix_prob.f;
+ found->value.prob = fix_prob.f;
// We don't need to recurse further down because this entry already set the bits for lower entries.
break;
}
diff --git a/lm/search_hashed.hh b/lm/search_hashed.hh
index e289fd114..96b03013e 100644
--- a/lm/search_hashed.hh
+++ b/lm/search_hashed.hh
@@ -8,7 +8,6 @@
#include "lm/weights.hh"
#include "util/bit_packing.hh"
-#include "util/key_value_packing.hh"
#include "util/probing_hash_table.hh"
#include <algorithm>
@@ -105,7 +104,7 @@ template <class MiddleT, class LongestT> class TemplateHashedSearch : public Has
std::cerr << "Extend pointer " << extend_pointer << " should have been found for length " << (unsigned) extend_length << std::endl;
abort();
}
- val.f = found->GetValue().prob;
+ val.f = found->value.prob;
}
val.i |= util::kSignBit;
prob = val.f;
@@ -117,12 +116,12 @@ template <class MiddleT, class LongestT> class TemplateHashedSearch : public Has
typename Middle::ConstIterator found;
if (!middle.Find(node, found)) return false;
util::FloatEnc enc;
- enc.f = found->GetValue().prob;
+ enc.f = found->value.prob;
ret.independent_left = (enc.i & util::kSignBit);
ret.extend_left = node;
enc.i |= util::kSignBit;
ret.prob = enc.f;
- backoff = found->GetValue().backoff;
+ backoff = found->value.backoff;
return true;
}
@@ -132,7 +131,7 @@ template <class MiddleT, class LongestT> class TemplateHashedSearch : public Has
node = CombineWordHash(node, word);
typename Middle::ConstIterator found;
if (!middle.Find(node, found)) return false;
- backoff = found->GetValue().backoff;
+ backoff = found->value.backoff;
return true;
}
@@ -141,7 +140,7 @@ template <class MiddleT, class LongestT> class TemplateHashedSearch : public Has
node = CombineWordHash(node, word);
typename Longest::ConstIterator found;
if (!longest.Find(node, found)) return false;
- prob = found->GetValue().prob;
+ prob = found->value.prob;
return true;
}
@@ -160,14 +159,50 @@ template <class MiddleT, class LongestT> class TemplateHashedSearch : public Has
std::vector<Middle> middle_;
};
-// std::identity is an SGI extension :-(
-struct IdentityHash : public std::unary_function<uint64_t, size_t> {
- size_t operator()(uint64_t arg) const { return static_cast<size_t>(arg); }
+/* These look like perfect candidates for a template, right? Ancient gcc (4.1
+ * on RedHat stale linux) doesn't pack templates correctly. ProbBackoffEntry
+ * is a multiple of 8 bytes anyway. ProbEntry is 12 bytes so it's set to pack.
+ */
+struct ProbBackoffEntry {
+ uint64_t key;
+ ProbBackoff value;
+ typedef uint64_t Key;
+ typedef ProbBackoff Value;
+ uint64_t GetKey() const {
+ return key;
+ }
+ static ProbBackoffEntry Make(uint64_t key, ProbBackoff value) {
+ ProbBackoffEntry ret;
+ ret.key = key;
+ ret.value = value;
+ return ret;
+ }
+};
+
+#pragma pack(push)
+#pragma pack(4)
+struct ProbEntry {
+ uint64_t key;
+ Prob value;
+ typedef uint64_t Key;
+ typedef Prob Value;
+ uint64_t GetKey() const {
+ return key;
+ }
+ static ProbEntry Make(uint64_t key, Prob value) {
+ ProbEntry ret;
+ ret.key = key;
+ ret.value = value;
+ return ret;
+ }
};
+#pragma pack(pop)
+
+
struct ProbingHashedSearch : public TemplateHashedSearch<
- util::ProbingHashTable<util::ByteAlignedPacking<uint64_t, ProbBackoff>, IdentityHash>,
- util::ProbingHashTable<util::ByteAlignedPacking<uint64_t, Prob>, IdentityHash> > {
+ util::ProbingHashTable<ProbBackoffEntry, util::IdentityHash>,
+ util::ProbingHashTable<ProbEntry, util::IdentityHash> > {
static const ModelType kModelType = HASH_PROBING;
};
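
ProbEntry is forced down to 12 bytes with #pragma pack(4) because the uint64_t key would otherwise pull the struct up to 16 bytes, and, as the comment notes, old gcc releases did not honour the pragma for template-dependent types, which is why the two entry structs are written out by hand rather than as a template. A small sketch showing the size difference; the exact numbers are implementation-defined, but on a typical 64-bit ABI they come out as 16 versus 12 bytes:

#include <cstdint>
#include <cstdio>

struct Prob { float prob; };                          // 4-byte payload, like lm::ngram::Prob

struct UnpackedEntry { uint64_t key; Prob value; };   // usually padded to 16 bytes

#pragma pack(push)
#pragma pack(4)
struct PackedEntry { uint64_t key; Prob value; };     // 12 bytes under 4-byte packing
#pragma pack(pop)

int main() {
  std::printf("unpacked entry: %zu bytes\n", sizeof(UnpackedEntry));
  std::printf("packed entry:   %zu bytes\n", sizeof(PackedEntry));
  return 0;
}
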
diff --git a/lm/search_trie.cc b/lm/search_trie.cc
index 8cb6984b0..f36d9c53c 100644
--- a/lm/search_trie.cc
+++ b/lm/search_trie.cc
@@ -377,7 +377,7 @@ template <class Doing> class BlankManager {
template <class Doing> void RecursiveInsert(const unsigned char total_order, const WordIndex unigram_count, RecordReader *input, std::ostream *progress_out, const char *message, Doing &doing) {
util::ErsatzProgress progress(progress_out, message, unigram_count + 1);
- unsigned int unigram = 0;
+ WordIndex unigram = 0;
std::priority_queue<Gram> grams;
grams.push(Gram(&unigram, 1));
for (unsigned char i = 2; i <= total_order; ++i) {
diff --git a/lm/vocab.cc b/lm/vocab.cc
index 5ac828178..c10743ceb 100644
--- a/lm/vocab.cc
+++ b/lm/vocab.cc
@@ -13,6 +13,8 @@
#include <string>
+#include <string.h>
+
namespace lm {
namespace ngram {
@@ -30,16 +32,26 @@ const uint64_t kUnknownHash = detail::HashForVocab("<unk>", 5);
// Sadly some LMs have <UNK>.
const uint64_t kUnknownCapHash = detail::HashForVocab("<UNK>", 5);
-WordIndex ReadWords(int fd, EnumerateVocab *enumerate) {
- if (!enumerate) return std::numeric_limits<WordIndex>::max();
+void ReadWords(int fd, EnumerateVocab *enumerate, WordIndex expected_count) {
+ // Check that we're at the right place by reading <unk> which is always first.
+ char check_unk[6];
+ util::ReadOrThrow(fd, check_unk, 6);
+ UTIL_THROW_IF(
+ memcmp(check_unk, "<unk>", 6),
+ FormatLoadException,
+ "Vocabulary words are in the wrong place. This could be because the binary file was built with stale gcc and old kenlm. Stale gcc, including the gcc distributed with RedHat and OS X, has a bug that ignores pragma pack for template-dependent types. New kenlm works around this, so you'll save memory but have to rebuild any binary files using the probing data structure.");
+ if (!enumerate) return;
+ enumerate->Add(0, "<unk>");
+
+ // Read all the words after unk.
const std::size_t kInitialRead = 16384;
std::string buf;
buf.reserve(kInitialRead + 100);
buf.resize(kInitialRead);
- WordIndex index = 0;
+ WordIndex index = 1; // Read <unk> already.
while (true) {
std::size_t got = util::ReadOrEOF(fd, &buf[0], kInitialRead);
- if (got == 0) return index;
+ if (got == 0) break;
buf.resize(got);
while (buf[buf.size() - 1]) {
char next_char;
@@ -53,6 +65,8 @@ WordIndex ReadWords(int fd, EnumerateVocab *enumerate) {
i += length + 1 /* null byte */;
}
}
+
+ UTIL_THROW_IF(expected_count != index, FormatLoadException, "The binary file has the wrong number of words at the end. This could be caused by a truncated binary file.");
}
} // namespace
@@ -130,9 +144,9 @@ void SortedVocabulary::FinishedLoading(ProbBackoff *reorder_vocab) {
void SortedVocabulary::LoadedBinary(int fd, EnumerateVocab *to) {
end_ = begin_ + *(reinterpret_cast<const uint64_t*>(begin_) - 1);
- ReadWords(fd, to);
SetSpecial(Index("<s>"), Index("</s>"), 0);
bound_ = end_ - begin_ + 1;
+ ReadWords(fd, to, bound_);
}
namespace {
@@ -175,7 +189,7 @@ WordIndex ProbingVocabulary::Insert(const StringPiece &str) {
return 0;
} else {
if (enumerate_) enumerate_->Add(bound_, str);
- lookup_.Insert(Lookup::Packing::Make(hashed, bound_));
+ lookup_.Insert(ProbingVocabuaryEntry::Make(hashed, bound_));
return bound_++;
}
}
@@ -190,9 +204,9 @@ void ProbingVocabulary::FinishedLoading(ProbBackoff * /*reorder_vocab*/) {
void ProbingVocabulary::LoadedBinary(int fd, EnumerateVocab *to) {
UTIL_THROW_IF(header_->version != kProbingVocabularyVersion, FormatLoadException, "The binary file has probing version " << header_->version << " but the code expects version " << kProbingVocabularyVersion << ". Please rerun build_binary using the same version of the code.");
lookup_.LoadedBinary();
- ReadWords(fd, to);
bound_ = header_->bound;
SetSpecial(Index("<s>"), Index("</s>"), 0);
+ ReadWords(fd, to, bound_);
}
void MissingUnknown(const Config &config) throw(SpecialWordMissingException) {
@@ -215,7 +229,7 @@ void MissingSentenceMarker(const Config &config, const char *str) throw(SpecialW
if (config.messages) *config.messages << "Missing special word " << str << "; will treat it as <unk>.";
break;
case THROW_UP:
- UTIL_THROW(SpecialWordMissingException, "The ARPA file is missing " << str << " and the model is configured to reject these models. Run build_binary -s to disable this check.");
+ UTIL_THROW(SpecialWordMissingException, "The ARPA file is missing " << str << " and the model is configured to reject these models. If you built your ARPA with IRSTLM and forgot to run add-start-end.sh, complain to <bertoldi at fbk.eu> stating that you think build-lm.sh should do this by default, then go back and retrain your model from the start. To bypass this check and treat " << str << " as an OOV, pass -s. The resulting model will not work with e.g. Moses.");
}
}
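
ReadWords above expects the vocabulary section of a binary file to start with the NUL-terminated string "<unk>" followed by every other word, each NUL-terminated, and it now cross-checks the final count against the bound stored in the header. A minimal sketch of walking such a buffer of NUL-separated words, using an in-memory array instead of a file descriptor; word index 0 corresponds to "<unk>", matching the check in the patch:

#include <cstring>
#include <iostream>
#include <string>
#include <vector>

// Parse consecutive NUL-terminated words, the layout assumed for the binary vocab section.
std::vector<std::string> ParseWords(const char *buf, std::size_t size) {
  std::vector<std::string> words;
  std::size_t i = 0;
  while (i < size) {
    std::size_t length = std::strlen(buf + i);
    words.push_back(std::string(buf + i, length));
    i += length + 1; // skip the NUL byte
  }
  return words;
}

int main() {
  // sizeof(data) includes the embedded NULs and the trailing one from the literal.
  const char data[] = "<unk>\0<s>\0</s>\0example";
  std::vector<std::string> words = ParseWords(data, sizeof(data));
  for (std::size_t i = 0; i < words.size(); ++i)
    std::cout << i << '\t' << words[i] << '\n';
  return 0;
}
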
diff --git a/lm/vocab.hh b/lm/vocab.hh
index 3c3414fb9..48db3d627 100644
--- a/lm/vocab.hh
+++ b/lm/vocab.hh
@@ -4,7 +4,6 @@
#include "lm/enumerate_vocab.hh"
#include "lm/lm_exception.hh"
#include "lm/virtual_interface.hh"
-#include "util/key_value_packing.hh"
#include "util/probing_hash_table.hh"
#include "util/sorted_uniform.hh"
#include "util/string_piece.hh"
@@ -100,6 +99,26 @@ class SortedVocabulary : public base::Vocabulary {
std::vector<std::string> strings_to_enumerate_;
};
+#pragma pack(push)
+#pragma pack(4)
+struct ProbingVocabuaryEntry {
+ uint64_t key;
+ WordIndex value;
+
+ typedef uint64_t Key;
+ uint64_t GetKey() const {
+ return key;
+ }
+
+ static ProbingVocabuaryEntry Make(uint64_t key, WordIndex value) {
+ ProbingVocabuaryEntry ret;
+ ret.key = key;
+ ret.value = value;
+ return ret;
+ }
+};
+#pragma pack(pop)
+
// Vocabulary storing a map from uint64_t to WordIndex.
class ProbingVocabulary : public base::Vocabulary {
public:
@@ -107,7 +126,7 @@ class ProbingVocabulary : public base::Vocabulary {
WordIndex Index(const StringPiece &str) const {
Lookup::ConstIterator i;
- return lookup_.Find(detail::HashForVocab(str), i) ? i->GetValue() : 0;
+ return lookup_.Find(detail::HashForVocab(str), i) ? i->value : 0;
}
static size_t Size(std::size_t entries, const Config &config);
@@ -129,12 +148,7 @@ class ProbingVocabulary : public base::Vocabulary {
void LoadedBinary(int fd, EnumerateVocab *to);
private:
- // std::identity is an SGI extension :-(
- struct IdentityHash : public std::unary_function<uint64_t, std::size_t> {
- std::size_t operator()(uint64_t arg) const { return static_cast<std::size_t>(arg); }
- };
-
- typedef util::ProbingHashTable<util::ByteAlignedPacking<uint64_t, WordIndex>, IdentityHash> Lookup;
+ typedef util::ProbingHashTable<ProbingVocabuaryEntry, util::IdentityHash> Lookup;
Lookup lookup_;
diff --git a/m4/ax_xmlrpc_c.m4 b/m4/ax_xmlrpc_c.m4
deleted file mode 100644
index a45760fff..000000000
--- a/m4/ax_xmlrpc_c.m4
+++ /dev/null
@@ -1,52 +0,0 @@
-AC_DEFUN([AX_XMLRPC_C], [
- AC_MSG_CHECKING(for XMLRPC-C)
-
- AC_ARG_WITH(xmlrpc-c,
- [ --with-xmlrpc-c=PATH Enable XMLRPC-C support. Setting the PATH to yes will search for xmlrpc-c-config on the shell PATH,],
- [
- if test "$withval" = "no"; then
- AC_MSG_RESULT(no)
-
- else
- if test "$withval" = "yes"; then
- xmlrpc_cc_prg="xmlrpc-c-config"
- else
- xmlrpc_cc_prg="$withval"
- fi
-
- if eval $xmlrpc_cc_prg --version 2>/dev/null >/dev/null; then
- XMLRPC_C_CPPFLAGS=`$xmlrpc_cc_prg --cflags c++2 abyss-server`
- XMLRPC_C_LIBS=`$xmlrpc_cc_prg c++2 abyss-server --libs`
- CXXFLAGS_SAVED=$CXXFLAGS
- CXXFLAGS="$CXXFLAGS $XMLRPC_C_CPPFLAGS"
- LIBS_SAVED=$LIBS
- LIBS="$LIBS $XMLRPC_C_LIBS"
-
- AC_TRY_LINK(
- [ #include <xmlrpc-c/server.h>
- ],[ xmlrpc_registry_new(NULL); ],
- [
- AC_MSG_RESULT(ok)
- ], [
- AC_MSG_RESULT(failed)
- AC_MSG_ERROR(Could not compile XMLRPC-C test.)
- ])
-
-dnl AC_DEFINE(HAVE_XMLRPC_C, 1, Support for XMLRPC-C.)
- have_xmlrpc_c=yes
- AC_SUBST(XMLRPC_C_LIBS)
- AC_SUBST(XMLRPC_C_CPPFLAGS)
-
- LIBS=$LIBS_SAVED
- CXXFLAGS=$CXXFLAGS_SAVED
-
- else
- AC_MSG_RESULT(failed)
- AC_MSG_ERROR(Could not compile XMLRPC-C test.)
- fi
- fi
-
- ],[
- AC_MSG_RESULT(ignored)
- ])
-])
diff --git a/m4/boost.m4 b/m4/boost.m4
deleted file mode 100644
index a36766870..000000000
--- a/m4/boost.m4
+++ /dev/null
@@ -1,1045 +0,0 @@
-# boost.m4: Locate Boost headers and libraries for autoconf-based projects.
-# Copyright (C) 2007, 2008, 2009 Benoit Sigoure <tsuna@lrde.epita.fr>
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Additional permission under section 7 of the GNU General Public
-# License, version 3 ("GPLv3"):
-#
-# If you convey this file as part of a work that contains a
-# configuration script generated by Autoconf, you may do so under
-# terms of your choice.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-m4_define([_BOOST_SERIAL], [m4_translit([
-# serial 12
-], [#
-], [])])
-
-# Original sources can be found at http://github.com/tsuna/boost.m4
-# You can fetch the latest version of the script by doing:
-# wget http://github.com/tsuna/boost.m4/raw/master/build-aux/boost.m4
-
-# ------ #
-# README #
-# ------ #
-
-# This file provides several macros to use the various Boost libraries.
-# The first macro is BOOST_REQUIRE. It will simply check if it's possible to
-# find the Boost headers of a given (optional) minimum version and it will
-# define BOOST_CPPFLAGS accordingly. It will add an option --with-boost to
-# your configure so that users can specify non standard locations.
-# If the user's environment contains BOOST_ROOT and --with-boost was not
-# specified, --with-boost=$BOOST_ROOT is implicitly used.
-# For more README and documentation, go to http://github.com/tsuna/boost.m4
-# Note: THESE MACROS ASSUME THAT YOU USE LIBTOOL. If you don't, don't worry,
-# simply read the README, it will show you what to do step by step.
-
-m4_pattern_forbid([^_?BOOST_])
-
-
-# _BOOST_SED_CPP(SED-PROGRAM, PROGRAM,
-# [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
-# --------------------------------------------------------
-# Same as AC_EGREP_CPP, but leave the result in conftest.i.
-# PATTERN is *not* overquoted, as in AC_EGREP_CPP. It could be useful
-# to turn this into a macro which extracts the value of any macro.
-m4_define([_BOOST_SED_CPP],
-[AC_LANG_PREPROC_REQUIRE()dnl
-AC_REQUIRE([AC_PROG_SED])dnl
-AC_LANG_CONFTEST([AC_LANG_SOURCE([[$2]])])
-AS_IF([dnl eval is necessary to expand ac_cpp.
-dnl Ultrix and Pyramid sh refuse to redirect output of eval, so use subshell.
-dnl Beware of Windows end-of-lines, for instance if we are running
-dnl some Windows programs under Wine. In that case, boost/version.hpp
-dnl is certainly using "\r\n", but the regular Unix shell will only
-dnl strip `\n' with backquotes, not the `\r'. This results in
-dnl boost_cv_lib_version='1_37\r' for instance, which breaks
-dnl everything else.
-dnl Cannot use 'dnl' after [$4] because a trailing dnl may break AC_CACHE_CHECK
-(eval "$ac_cpp conftest.$ac_ext") 2>&AS_MESSAGE_LOG_FD |
- tr -d '\r' |
- $SED -n -e "$1" >conftest.i 2>&1],
- [$3],
- [$4])
-rm -rf conftest*
-])# AC_EGREP_CPP
-
-
-
-# BOOST_REQUIRE([VERSION], [ACTION-IF-NOT-FOUND])
-# -----------------------------------------------
-# Look for Boost. If version is given, it must either be a literal of the form
-# "X.Y.Z" where X, Y and Z are integers (the ".Z" part being optional) or a
-# variable "$var".
-# Defines the value BOOST_CPPFLAGS. This macro only checks for headers with
-# the required version, it does not check for any of the Boost libraries.
-# On # success, defines HAVE_BOOST. On failure, calls the optional
-# ACTION-IF-NOT-FOUND action if one was supplied.
-# Otherwise aborts with an error message.
-AC_DEFUN([BOOST_REQUIRE],
-[AC_REQUIRE([AC_PROG_CXX])dnl
-AC_REQUIRE([AC_PROG_GREP])dnl
-echo "$as_me: this is boost.m4[]_BOOST_SERIAL" >&AS_MESSAGE_LOG_FD
-boost_save_IFS=$IFS
-boost_version_req=$1
-IFS=.
-set x $boost_version_req 0 0 0
-IFS=$boost_save_IFS
-shift
-boost_version_req=`expr "$[1]" '*' 100000 + "$[2]" '*' 100 + "$[3]"`
-AC_ARG_WITH([boost],
- [AS_HELP_STRING([--with-boost=DIR],
- [prefix of Boost $1 @<:@guess@:>@])])dnl
-AC_ARG_VAR([BOOST_ROOT],[Location of Boost installation])dnl
-# If BOOST_ROOT is set and the user has not provided a value to
-# --with-boost, then treat BOOST_ROOT as if it the user supplied it.
-if test x"$BOOST_ROOT" != x; then
- if test x"$with_boost" = x; then
- AC_MSG_NOTICE([Detected BOOST_ROOT; continuing with --with-boost=$BOOST_ROOT])
- with_boost=$BOOST_ROOT
- else
- AC_MSG_NOTICE([Detected BOOST_ROOT=$BOOST_ROOT, but overridden by --with-boost=$with_boost])
- fi
-fi
-AC_SUBST([DISTCHECK_CONFIGURE_FLAGS],
- ["$DISTCHECK_CONFIGURE_FLAGS '--with-boost=$with_boost'"])
-boost_save_CPPFLAGS=$CPPFLAGS
- AC_CACHE_CHECK([for Boost headers version >= $boost_version_req],
- [boost_cv_inc_path],
- [boost_cv_inc_path=no
-AC_LANG_PUSH([C++])dnl
-m4_pattern_allow([^BOOST_VERSION$])dnl
- AC_LANG_CONFTEST([AC_LANG_PROGRAM([[#include <boost/version.hpp>
-#if !defined BOOST_VERSION
-# error BOOST_VERSION is not defined
-#elif BOOST_VERSION < $boost_version_req
-# error Boost headers version < $boost_version_req
-#endif
-]])])
- # If the user provided a value to --with-boost, use it and only it.
- case $with_boost in #(
- ''|yes) set x '' /opt/local/include /usr/local/include /opt/include \
- /usr/include C:/Boost/include;; #(
- *) set x "$with_boost/include" "$with_boost";;
- esac
- shift
- for boost_dir
- do
- # Without --layout=system, Boost (or at least some versions) installs
- # itself in <prefix>/include/boost-<version>. This inner loop helps to
- # find headers in such directories.
- #
- # Any ${boost_dir}/boost-x_xx directories are searched in reverse version
- # order followed by ${boost_dir}. The final '.' is a sentinel for
- # searching $boost_dir" itself. Entries are whitespace separated.
- #
- # I didn't indent this loop on purpose (to avoid over-indented code)
- boost_layout_system_search_list=`cd "$boost_dir" 2>/dev/null \
- && ls -1 | "${GREP}" '^boost-' | sort -rn -t- -k2 \
- && echo .`
- for boost_inc in $boost_layout_system_search_list
- do
- if test x"$boost_inc" != x.; then
- boost_inc="$boost_dir/$boost_inc"
- else
- boost_inc="$boost_dir" # Uses sentinel in boost_layout_system_search_list
- fi
- if test x"$boost_inc" != x; then
- # We are going to check whether the version of Boost installed
- # in $boost_inc is usable by running a compilation that
- # #includes it. But if we pass a -I/some/path in which Boost
- # is not installed, the compiler will just skip this -I and
- # use other locations (either from CPPFLAGS, or from its list
- # of system include directories). As a result we would use
- # header installed on the machine instead of the /some/path
- # specified by the user. So in that precise case (trying
- # $boost_inc), make sure the version.hpp exists.
- #
- # Use test -e as there can be symlinks.
- test -e "$boost_inc/boost/version.hpp" || continue
- CPPFLAGS="$CPPFLAGS -I$boost_inc"
- fi
- AC_COMPILE_IFELSE([], [boost_cv_inc_path=yes], [boost_cv_version=no])
- if test x"$boost_cv_inc_path" = xyes; then
- if test x"$boost_inc" != x; then
- boost_cv_inc_path=$boost_inc
- fi
- break 2
- fi
- done
- done
-AC_LANG_POP([C++])dnl
- ])
- case $boost_cv_inc_path in #(
- no)
- boost_errmsg="cannot find Boost headers version >= $boost_version_req"
- m4_if([$2], [], [AC_MSG_ERROR([$boost_errmsg])],
- [AC_MSG_NOTICE([$boost_errmsg])])
- $2
- ;;#(
- yes)
- BOOST_CPPFLAGS=
- AC_DEFINE([HAVE_BOOST], [1],
- [Defined if the requested minimum BOOST version is satisfied])
- ;;#(
- *)
- AC_SUBST([BOOST_CPPFLAGS], ["-I$boost_cv_inc_path"])
- ;;
- esac
- AC_CACHE_CHECK([for Boost's header version],
- [boost_cv_lib_version],
- [m4_pattern_allow([^BOOST_LIB_VERSION$])dnl
- _BOOST_SED_CPP([/^boost-lib-version = /{s///;s/\"//g;p;g;}],
- [#include <boost/version.hpp>
-boost-lib-version = BOOST_LIB_VERSION],
- [boost_cv_lib_version=`cat conftest.i`])])
- # e.g. "134" for 1_34_1 or "135" for 1_35
- boost_major_version=`echo "$boost_cv_lib_version" | sed 's/_//;s/_.*//'`
- case $boost_major_version in #(
- '' | *[[!0-9]]*)
- AC_MSG_ERROR([invalid value: boost_major_version=$boost_major_version])
- ;;
- esac
-CPPFLAGS=$boost_save_CPPFLAGS
-])# BOOST_REQUIRE
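For orientation, a minimal sketch of how the macro above (and the header check defined below) is typically consumed; the project name, version bound and Boost prefix are arbitrary placeholders, not taken from this patch:

  # Illustrative configure.ac fragment -- all names, versions and paths are placeholders
  AC_INIT([myprog], [0.1])
  AC_PROG_CXX
  BOOST_REQUIRE([1.36])                       # aborts unless Boost >= 1.36 headers are found
  BOOST_FIND_HEADER([boost/shared_ptr.hpp])   # on success defines HAVE_BOOST_SHARED_PTR_HPP
  AC_OUTPUT

  # At configure time a non-default Boost prefix can be supplied either way:
  #   ./configure --with-boost=/opt/boost
  #   BOOST_ROOT=/opt/boost ./configure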
-
-# BOOST_STATIC()
-# --------------
-# Add the "--enable-static-boost" configure argument. If this argument is given
-# on the command line, static versions of the libraries will be looked up.
-AC_DEFUN([BOOST_STATIC],
- [AC_ARG_ENABLE([static-boost],
- [AC_HELP_STRING([--enable-static-boost],
- [Prefer the static boost libraries over the shared ones [no]])],
- [enable_static_boost=yes],
- [enable_static_boost=no])])# BOOST_STATIC
-
-# BOOST_FIND_HEADER([HEADER-NAME], [ACTION-IF-NOT-FOUND], [ACTION-IF-FOUND])
-# --------------------------------------------------------------------------
-# Wrapper around AC_CHECK_HEADER for Boost headers. Useful to check for
-# some parts of the Boost library which are only made of headers and don't
-# require linking (such as Boost.Foreach).
-#
-# Default ACTION-IF-NOT-FOUND: Fail with a fatal error unless Boost couldn't be
-# found in the first place, in which case by default a notice is issued to the
-# user. Presumably if we haven't died already it's because it's OK to not have
-# Boost, which is why only a notice is issued instead of a hard error.
-#
-# Default ACTION-IF-FOUND: define the preprocessor symbol HAVE_<HEADER-NAME> in
-# case of success (where HEADER-NAME is written LIKE_THIS, e.g.,
-# HAVE_BOOST_FOREACH_HPP).
-AC_DEFUN([BOOST_FIND_HEADER],
-[AC_REQUIRE([BOOST_REQUIRE])dnl
-if test x"$boost_cv_inc_path" = xno; then
- m4_default([$2], [AC_MSG_NOTICE([Boost not available, not searching for $1])])
-else
-AC_LANG_PUSH([C++])dnl
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-AC_CHECK_HEADER([$1],
- [m4_default([$3], [AC_DEFINE(AS_TR_CPP([HAVE_$1]), [1],
- [Define to 1 if you have <$1>])])],
- [m4_default([$2], [AC_MSG_ERROR([cannot find $1])])])
-CPPFLAGS=$boost_save_CPPFLAGS
-AC_LANG_POP([C++])dnl
-fi
-])# BOOST_FIND_HEADER
-
-
-# BOOST_FIND_LIB([LIB-NAME], [PREFERRED-RT-OPT], [HEADER-NAME], [CXX-TEST],
-# [CXX-PROLOGUE])
-# -------------------------------------------------------------------------
-# Look for the Boost library LIB-NAME (e.g., LIB-NAME = `thread', for
-# libboost_thread). Check that HEADER-NAME works and check that
-# libboost_LIB-NAME can link with the code CXX-TEST. The optional argument
-# CXX-PROLOGUE can be used to include some C++ code before the `main'
-# function.
-#
-# Invokes BOOST_FIND_HEADER([HEADER-NAME]) (see above).
-#
-# Boost libraries typically come compiled with several flavors (with different
-# runtime options) so PREFERRED-RT-OPT is the preferred suffix. A suffix is one
-# or more of the following letters: sgdpn (in that order). s = static
-# runtime, d = debug build, g = debug/diagnostic runtime, p = STLPort build,
-# n = (unsure) STLPort build without iostreams from STLPort (it looks like `n'
-# must always be used along with `p'). Additionally, PREFERRED-RT-OPT can
-# start with `mt-' to indicate that there is a preference for multi-thread
-# builds. Some sample values for PREFERRED-RT-OPT: (nothing), mt, d, mt-d, gdp
-# ... If you want to make sure you have a specific version of Boost
-# (eg, >= 1.33) you *must* invoke BOOST_REQUIRE before this macro.
-AC_DEFUN([BOOST_FIND_LIB],
-[AC_REQUIRE([BOOST_REQUIRE])dnl
-AC_REQUIRE([_BOOST_FIND_COMPILER_TAG])dnl
-AC_REQUIRE([BOOST_STATIC])dnl
-AC_REQUIRE([_BOOST_GUESS_WHETHER_TO_USE_MT])dnl
-if test x"$boost_cv_inc_path" = xno; then
- AC_MSG_NOTICE([Boost not available, not searching for the Boost $1 library])
-else
-dnl The else branch is huge and is deliberately not indented.
-AC_LANG_PUSH([C++])dnl
-AS_VAR_PUSHDEF([Boost_lib], [boost_cv_lib_$1])dnl
-AS_VAR_PUSHDEF([Boost_lib_LDFLAGS], [boost_cv_lib_$1_LDFLAGS])dnl
-AS_VAR_PUSHDEF([Boost_lib_LIBS], [boost_cv_lib_$1_LIBS])dnl
-BOOST_FIND_HEADER([$3])
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-# Now let's try to find the library. The algorithm is as follows: first look
-# for a given library name according to the user's PREFERRED-RT-OPT. For each
-# library name, we prefer to use the ones that carry the tag (toolset name).
-# Each library is searched for in the various standard paths where Boost is
-# usually installed. If we can't find the standard variants, we try to
-# enforce -mt (for instance on MacOSX, libboost_threads.dylib doesn't exist
-# but there's -obviously- libboost_threads-mt.dylib).
-AC_CACHE_CHECK([for the Boost $1 library], [Boost_lib],
- [Boost_lib=no
- case "$2" in #(
- mt | mt-) boost_mt=-mt; boost_rtopt=;; #(
- mt* | mt-*) boost_mt=-mt; boost_rtopt=`expr "X$2" : 'Xmt-*\(.*\)'`;; #(
- *) boost_mt=; boost_rtopt=$2;;
- esac
- if test $enable_static_boost = yes; then
- boost_rtopt="s$boost_rtopt"
- fi
- # Find the proper debug variant depending on what we've been asked to find.
- case $boost_rtopt in #(
- *d*) boost_rt_d=$boost_rtopt;; #(
- *[[sgpn]]*) # Insert the `d' at the right place (in between `sg' and `pn')
-      boost_rt_d=`echo "$boost_rtopt" | sed 's/\(s*g*\)\(p*n*\)/\1d\2/'`;; #(
- *) boost_rt_d='-d';;
- esac
-  # If PREFERRED-RT-OPT is not empty, prepend a `-'.
- test -n "$boost_rtopt" && boost_rtopt="-$boost_rtopt"
- $boost_guess_use_mt && boost_mt=-mt
-  # Look for the absolute path to the static archive.
-  # $libext is computed by Libtool, but let's make sure it's non-empty.
- test -z "$libext" &&
- AC_MSG_ERROR([the libext variable is empty, did you invoke Libtool?])
- boost_save_ac_objext=$ac_objext
- # Generate the test file.
- AC_LANG_CONFTEST([AC_LANG_PROGRAM([#include <$3>
-$5], [$4])])
-dnl Optimization hacks: compiling C++ is slow, especially with Boost. What
-dnl we're trying to do here is guess the right combination of link flags
-dnl (LIBS / LDFLAGS) to use a given library. This can take several
-dnl iterations before it succeeds and is thus *very* slow. So what we do
-dnl instead is that we compile the code first (and thus get an object file,
-dnl typically conftest.o). Then we try various combinations of link flags
-dnl until we succeed to link conftest.o in an executable. The problem is
-dnl that the various TRY_LINK / COMPILE_IFELSE macros of Autoconf always
-dnl remove all the temporary files including conftest.o. So the trick here
-dnl is to temporarily change the value of ac_objext so that conftest.o is
-dnl preserved across tests. This is obviously fragile and I will burn in
-dnl hell for not respecting Autoconf's documented interfaces, but in the
-dnl meantime, it optimizes the macro by a factor of 5 to 30.
-dnl Another small optimization: the first argument of AC_COMPILE_IFELSE is left
-dnl empty because the test file is generated only once above (before we
-dnl start the for loops).
- AC_COMPILE_IFELSE([],
- [ac_objext=do_not_rm_me_plz],
- [AC_MSG_ERROR([cannot compile a test that uses Boost $1])])
- ac_objext=$boost_save_ac_objext
- boost_failed_libs=
-# Don't bother to indent the 6 nested for loops; only the 2 innermost ones
-# matter.
-for boost_tag_ in -$boost_cv_lib_tag ''; do
-for boost_ver_ in -$boost_cv_lib_version ''; do
-for boost_mt_ in $boost_mt -mt ''; do
-for boost_rtopt_ in $boost_rtopt '' -d; do
- for boost_lib in \
- boost_$1$boost_tag_$boost_mt_$boost_rtopt_$boost_ver_ \
- boost_$1$boost_tag_$boost_rtopt_$boost_ver_ \
- boost_$1$boost_tag_$boost_mt_$boost_ver_ \
- boost_$1$boost_tag_$boost_ver_
- do
-      # Avoid testing the same lib twice.
- case $boost_failed_libs in #(
- *@$boost_lib@*) continue;;
- esac
-  # If with_boost is empty, we'll search in /lib first, which is not quite
-  # right, so instead we'll try a location based on where the headers are.
- boost_tmp_lib=$with_boost
- test x"$with_boost" = x && boost_tmp_lib=${boost_cv_inc_path%/include}
- for boost_ldpath in "$boost_tmp_lib/lib" '' \
- /opt/local/lib /usr/local/lib /opt/lib /usr/lib \
- "$with_boost" C:/Boost/lib /lib /usr/lib64 /lib64
- do
- test -e "$boost_ldpath" || continue
- boost_save_LDFLAGS=$LDFLAGS
- # Are we looking for a static library?
- case $boost_ldpath:$boost_rtopt_ in #(
- *?*:*s*) # Yes (Non empty boost_ldpath + s in rt opt)
- Boost_lib_LIBS="$boost_ldpath/lib$boost_lib.$libext"
- test -e "$Boost_lib_LIBS" || continue;; #(
- *) # No: use -lboost_foo to find the shared library.
- Boost_lib_LIBS="-l$boost_lib";;
- esac
- boost_save_LIBS=$LIBS
- LIBS="$Boost_lib_LIBS $LIBS"
- test x"$boost_ldpath" != x && LDFLAGS="$LDFLAGS -L$boost_ldpath"
-dnl First argument of AC_LINK_IFELSE left empty because the test file is
-dnl generated only once above (before we start the for loops).
- _BOOST_AC_LINK_IFELSE([],
- [Boost_lib=yes], [Boost_lib=no])
- ac_objext=$boost_save_ac_objext
- LDFLAGS=$boost_save_LDFLAGS
- LIBS=$boost_save_LIBS
- if test x"$Boost_lib" = xyes; then
- Boost_lib_LDFLAGS="-L$boost_ldpath -R$boost_ldpath"
- break 6
- else
- boost_failed_libs="$boost_failed_libs@$boost_lib@"
- fi
- done
- done
-done
-done
-done
-done
-rm -f conftest.$ac_objext
-])
-case $Boost_lib in #(
- no) _AC_MSG_LOG_CONFTEST
-    AC_MSG_ERROR([cannot find the flags to link with Boost $1])
- ;;
-esac
-AC_SUBST(AS_TR_CPP([BOOST_$1_LDFLAGS]), [$Boost_lib_LDFLAGS])
-AC_SUBST(AS_TR_CPP([BOOST_$1_LIBS]), [$Boost_lib_LIBS])
-CPPFLAGS=$boost_save_CPPFLAGS
-AS_VAR_POPDEF([Boost_lib])dnl
-AS_VAR_POPDEF([Boost_lib_LDFLAGS])dnl
-AS_VAR_POPDEF([Boost_lib_LIBS])dnl
-AC_LANG_POP([C++])dnl
-fi
-])# BOOST_FIND_LIB
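To make the nested search loops above concrete, here is the first set of candidate base names that a call such as BOOST_FIND_LIB([thread], [mt-d], [boost/thread.hpp], ...) cycles through, assuming the detected toolset tag is gcc43 and the header version is 1_36 (both values are illustrative, not from this patch):

  #   boost_thread-gcc43-mt-d-1_36
  #   boost_thread-gcc43-d-1_36
  #   boost_thread-gcc43-mt-1_36
  #   boost_thread-gcc43-1_36
  # Subsequent passes drop the tag, the version, the -mt and the -d pieces in
  # turn, ending with plain boost_thread. Each candidate is tried either as
  # -l<name> (shared) or as lib<name>.$libext (static) in every directory of
  # the search list.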
-
-
-# --------------------------------------- #
-# Checks for the various Boost libraries. #
-# --------------------------------------- #
-
-# List of boost libraries: http://www.boost.org/libs/libraries.htm
-# The page http://beta.boost.org/doc/libs is useful: it gives the first release
-# version of each library (among other things).
-
-# BOOST_ARRAY()
-# -------------
-# Look for Boost.Array
-AC_DEFUN([BOOST_ARRAY],
-[BOOST_FIND_HEADER([boost/array.hpp])])
-
-
-# BOOST_ASIO()
-# ------------
-# Look for Boost.Asio (new in Boost 1.35).
-AC_DEFUN([BOOST_ASIO],
-[AC_REQUIRE([BOOST_SYSTEM])dnl
-BOOST_FIND_HEADER([boost/asio.hpp])])
-
-
-# BOOST_BIND()
-# ------------
-# Look for Boost.Bind
-AC_DEFUN([BOOST_BIND],
-[BOOST_FIND_HEADER([boost/bind.hpp])])
-
-
-# BOOST_CONVERSION()
-# ------------------
-# Look for Boost.Conversion (cast / lexical_cast)
-AC_DEFUN([BOOST_CONVERSION],
-[BOOST_FIND_HEADER([boost/cast.hpp])
-BOOST_FIND_HEADER([boost/lexical_cast.hpp])
-])# BOOST_CONVERSION
-
-
-# BOOST_DATE_TIME([PREFERRED-RT-OPT])
-# -----------------------------------
-# Look for Boost.Date_Time. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_DATE_TIME],
-[BOOST_FIND_LIB([date_time], [$1],
- [boost/date_time/posix_time/posix_time.hpp],
- [boost::posix_time::ptime t;])
-])# BOOST_DATE_TIME
-
-
-# BOOST_FILESYSTEM([PREFERRED-RT-OPT])
-# ------------------------------------
-# Look for Boost.Filesystem. For the documentation of PREFERRED-RT-OPT, see
-# the documentation of BOOST_FIND_LIB above.
-# Do not check for boost/filesystem.hpp because this file was introduced in
-# 1.34.
-AC_DEFUN([BOOST_FILESYSTEM],
-[# Do we have to check for Boost.System? This link-time dependency was
-# added as of 1.35.0. If we have a version <1.35, we must not attempt to
-# find Boost.System as it didn't exist by then.
-if test $boost_major_version -ge 135; then
-BOOST_SYSTEM([$1])
-fi # end of the Boost.System check.
-boost_filesystem_save_LIBS=$LIBS
-boost_filesystem_save_LDFLAGS=$LDFLAGS
-m4_pattern_allow([^BOOST_SYSTEM_(LIBS|LDFLAGS)$])dnl
-LIBS="$LIBS $BOOST_SYSTEM_LIBS"
-LDFLAGS="$LDFLAGS $BOOST_SYSTEM_LDFLAGS"
-BOOST_FIND_LIB([filesystem], [$1],
- [boost/filesystem/path.hpp], [boost::filesystem::path p;])
-LIBS=$boost_filesystem_save_LIBS
-LDFLAGS=$boost_filesystem_save_LDFLAGS
-])# BOOST_FILESYSTEM
-
-
-# BOOST_FOREACH()
-# ---------------
-# Look for Boost.Foreach
-AC_DEFUN([BOOST_FOREACH],
-[BOOST_FIND_HEADER([boost/foreach.hpp])])
-
-
-# BOOST_FORMAT()
-# --------------
-# Look for Boost.Format
-# Note: we can't check for boost/format/format_fwd.hpp because the header isn't
-# standalone. It can't be compiled because it triggers the following error:
-# boost/format/detail/config_macros.hpp:88: error: 'locale' in namespace 'std'
-# does not name a type
-AC_DEFUN([BOOST_FORMAT],
-[BOOST_FIND_HEADER([boost/format.hpp])])
-
-
-# BOOST_FUNCTION()
-# ----------------
-# Look for Boost.Function
-AC_DEFUN([BOOST_FUNCTION],
-[BOOST_FIND_HEADER([boost/function.hpp])])
-
-
-# BOOST_GRAPH([PREFERRED-RT-OPT])
-# -------------------------------
-# Look for Boost.Graphs. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_GRAPH],
-[BOOST_FIND_LIB([graph], [$1],
- [boost/graph/adjacency_list.hpp], [boost::adjacency_list<> g;])
-])# BOOST_GRAPH
-
-
-# BOOST_IOSTREAMS([PREFERRED-RT-OPT])
-# -----------------------------------
-# Look for Boost.IOStreams. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_IOSTREAMS],
-[BOOST_FIND_LIB([iostreams], [$1],
- [boost/iostreams/device/file_descriptor.hpp],
- [boost::iostreams::file_descriptor fd(0); fd.close();])
-])# BOOST_IOSTREAMS
-
-
-# BOOST_HASH()
-# ------------
-# Look for Boost.Functional/Hash
-AC_DEFUN([BOOST_HASH],
-[BOOST_FIND_HEADER([boost/functional/hash.hpp])])
-
-
-# BOOST_LAMBDA()
-# --------------
-# Look for Boost.Lambda
-AC_DEFUN([BOOST_LAMBDA],
-[BOOST_FIND_HEADER([boost/lambda/lambda.hpp])])
-
-
-# BOOST_MATH()
-# ------------
-# Look for Boost.Math
-# TODO: This library isn't header-only but it comes in multiple different
-# flavors that don't play well with BOOST_FIND_LIB (e.g, libboost_math_c99,
-# libboost_math_c99f, libboost_math_c99l, libboost_math_tr1,
-# libboost_math_tr1f, libboost_math_tr1l). This macro must be fixed to do the
-# right thing anyway.
-AC_DEFUN([BOOST_MATH],
-[BOOST_FIND_HEADER([boost/math/special_functions.hpp])])
-
-
-# BOOST_MULTIARRAY()
-# ------------------
-# Look for Boost.MultiArray
-AC_DEFUN([BOOST_MULTIARRAY],
-[BOOST_FIND_HEADER([boost/multi_array.hpp])])
-
-
-# BOOST_NUMERIC_CONVERSION()
-# --------------------------
-# Look for Boost.NumericConversion (policy-based numeric conversion)
-AC_DEFUN([BOOST_NUMERIC_CONVERSION],
-[BOOST_FIND_HEADER([boost/numeric/conversion/converter.hpp])
-])# BOOST_NUMERIC_CONVERSION
-
-
-# BOOST_OPTIONAL()
-# ----------------
-# Look for Boost.Optional
-AC_DEFUN([BOOST_OPTIONAL],
-[BOOST_FIND_HEADER([boost/optional.hpp])])
-
-
-# BOOST_PREPROCESSOR()
-# --------------------
-# Look for Boost.Preprocessor
-AC_DEFUN([BOOST_PREPROCESSOR],
-[BOOST_FIND_HEADER([boost/preprocessor/repeat.hpp])])
-
-
-# BOOST_PROGRAM_OPTIONS([PREFERRED-RT-OPT])
-# -----------------------------------------
-# Look for Boost.Program_options. For the documentation of PREFERRED-RT-OPT, see
-# the documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_PROGRAM_OPTIONS],
-[BOOST_FIND_LIB([program_options], [$1],
- [boost/program_options.hpp],
- [boost::program_options::options_description d("test");])
-])# BOOST_PROGRAM_OPTIONS
-
-
-# BOOST_REF()
-# -----------
-# Look for Boost.Ref
-AC_DEFUN([BOOST_REF],
-[BOOST_FIND_HEADER([boost/ref.hpp])])
-
-
-# BOOST_REGEX([PREFERRED-RT-OPT])
-# -------------------------------
-# Look for Boost.Regex. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_REGEX],
-[BOOST_FIND_LIB([regex], [$1],
- [boost/regex.hpp],
- [boost::regex exp("*"); boost::regex_match("foo", exp);])
-])# BOOST_REGEX
-
-# BOOST_MPI([PREFERRED-RT-OPT])
-# -------------------------------
-# Look for Boost.MPI. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_MPI],
-[BOOST_FIND_LIB([mpi], [$1],
- [boost/mpi/communicator.hpp],
- [boost::mpi::communicator world;])
-])# BOOST_MPI
-
-
-# BOOST_SERIALIZATION([PREFERRED-RT-OPT])
-# ---------------------------------------
-# Look for Boost.Serialization. For the documentation of PREFERRED-RT-OPT, see
-# the documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_SERIALIZATION],
-[BOOST_FIND_LIB([serialization], [$1],
- [boost/archive/text_oarchive.hpp],
- [std::ostream* o = 0; // Cheap way to get an ostream...
- boost::archive::text_oarchive t(*o);])
-])# BOOST_SERIALIZATION
-
-
-# BOOST_SIGNALS([PREFERRED-RT-OPT])
-# ---------------------------------
-# Look for Boost.Signals. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_SIGNALS],
-[BOOST_FIND_LIB([signals], [$1],
- [boost/signal.hpp],
- [boost::signal<void ()> s;])
-])# BOOST_SIGNALS
-
-
-# BOOST_SMART_PTR()
-# -----------------
-# Look for Boost.SmartPtr
-AC_DEFUN([BOOST_SMART_PTR],
-[BOOST_FIND_HEADER([boost/scoped_ptr.hpp])
-BOOST_FIND_HEADER([boost/shared_ptr.hpp])
-])
-
-
-# BOOST_STATICASSERT()
-# --------------------
-# Look for Boost.StaticAssert
-AC_DEFUN([BOOST_STATICASSERT],
-[BOOST_FIND_HEADER([boost/static_assert.hpp])])
-
-
-# BOOST_STRING_ALGO()
-# -------------------
-# Look for Boost.StringAlgo
-AC_DEFUN([BOOST_STRING_ALGO],
-[BOOST_FIND_HEADER([boost/algorithm/string.hpp])
-])
-
-
-# BOOST_SYSTEM([PREFERRED-RT-OPT])
-# --------------------------------
-# Look for Boost.System. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above. This library was introduced in Boost
-# 1.35.0.
-AC_DEFUN([BOOST_SYSTEM],
-[BOOST_FIND_LIB([system], [$1],
- [boost/system/error_code.hpp],
- [boost::system::error_code e; e.clear();])
-])# BOOST_SYSTEM
-
-
-# BOOST_TEST([PREFERRED-RT-OPT])
-# ------------------------------
-# Look for Boost.Test. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_TEST],
-[m4_pattern_allow([^BOOST_CHECK$])dnl
-BOOST_FIND_LIB([unit_test_framework], [$1],
- [boost/test/unit_test.hpp], [BOOST_CHECK(2 == 2);],
- [using boost::unit_test::test_suite;
- test_suite* init_unit_test_suite(int argc, char ** argv)
- { return NULL; }])
-])# BOOST_TEST
-
-
-# BOOST_THREADS([PREFERRED-RT-OPT])
-# ---------------------------------
-# Look for Boost.Thread. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-# FIXME: Provide an alias "BOOST_THREAD".
-AC_DEFUN([BOOST_THREADS],
-[dnl Having the pthread flag is required at least on GCC3 where
-dnl boost/thread.hpp would complain if we try to compile without
-dnl -pthread on GNU/Linux.
-AC_REQUIRE([_BOOST_PTHREAD_FLAG])dnl
-boost_threads_save_LIBS=$LIBS
-boost_threads_save_CPPFLAGS=$CPPFLAGS
-LIBS="$LIBS $boost_cv_pthread_flag"
-# Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
-# boost/thread.hpp will trigger a #error if -pthread isn't used:
-# boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
-# is not turned on. Please set the correct command line options for
-# threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
-CPPFLAGS="$CPPFLAGS $boost_cv_pthread_flag"
-BOOST_FIND_LIB([thread], [$1],
- [boost/thread.hpp], [boost::thread t; boost::mutex m;])
-BOOST_THREAD_LIBS="$BOOST_THREAD_LIBS $boost_cv_pthread_flag"
-BOOST_CPPFLAGS="$BOOST_CPPFLAGS $boost_cv_pthread_flag"
-LIBS=$boost_threads_save_LIBS
-CPPFLAGS=$boost_threads_save_CPPFLAGS
-])# BOOST_THREADS
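For context, the BOOST_THREAD_LDFLAGS / BOOST_THREAD_LIBS values substituted above are consumed from a Makefile.am in the usual Autotools way (the mert Makefile.am removed later in this commit does exactly that); a minimal sketch with a placeholder program name:

  # Illustrative Makefile.am fragment -- program and source names are placeholders
  bin_PROGRAMS = myprog
  myprog_SOURCES = main.cpp
  AM_CPPFLAGS = $(BOOST_CPPFLAGS)                               # carries the -pthread flag added above
  myprog_LDADD = $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)   # -L/-R path plus -lboost_thread...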
-
-
-# BOOST_TOKENIZER()
-# -----------------
-# Look for Boost.Tokenizer
-AC_DEFUN([BOOST_TOKENIZER],
-[BOOST_FIND_HEADER([boost/tokenizer.hpp])])
-
-
-# BOOST_TRIBOOL()
-# ---------------
-# Look for Boost.Tribool
-AC_DEFUN([BOOST_TRIBOOL],
-[BOOST_FIND_HEADER([boost/logic/tribool_fwd.hpp])
-BOOST_FIND_HEADER([boost/logic/tribool.hpp])
-])
-
-
-# BOOST_TUPLE()
-# -------------
-# Look for Boost.Tuple
-AC_DEFUN([BOOST_TUPLE],
-[BOOST_FIND_HEADER([boost/tuple/tuple.hpp])])
-
-
-# BOOST_TYPETRAITS()
-# --------------------
-# Look for Boost.TypeTraits
-AC_DEFUN([BOOST_TYPETRAITS],
-[BOOST_FIND_HEADER([boost/type_traits.hpp])])
-
-
-# BOOST_UTILITY()
-# ---------------
-# Look for Boost.Utility (noncopyable, result_of, base-from-member idiom,
-# etc.)
-AC_DEFUN([BOOST_UTILITY],
-[BOOST_FIND_HEADER([boost/utility.hpp])])
-
-
-# BOOST_VARIANT()
-# ---------------
-# Look for Boost.Variant.
-AC_DEFUN([BOOST_VARIANT],
-[BOOST_FIND_HEADER([boost/variant/variant_fwd.hpp])
-BOOST_FIND_HEADER([boost/variant.hpp])])
-
-
-# BOOST_WAVE([PREFERRED-RT-OPT])
-# ------------------------------
-# NOTE: If you intend to use Wave/Spirit with thread support, make sure you
-# call BOOST_THREADS first.
-# Look for Boost.Wave. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-AC_DEFUN([BOOST_WAVE],
-[AC_REQUIRE([BOOST_FILESYSTEM])dnl
-AC_REQUIRE([BOOST_DATE_TIME])dnl
-boost_wave_save_LIBS=$LIBS
-boost_wave_save_LDFLAGS=$LDFLAGS
-m4_pattern_allow([^BOOST_((FILE)?SYSTEM|DATE_TIME|THREAD)_(LIBS|LDFLAGS)$])dnl
-LIBS="$LIBS $BOOST_SYSTEM_LIBS $BOOST_FILESYSTEM_LIBS $BOOST_DATE_TIME_LIBS\
-$BOOST_THREAD_LIBS"
-LDFLAGS="$LDFLAGS $BOOST_SYSTEM_LDFLAGS $BOOST_FILESYSTEM_LDFLAGS\
-$BOOST_DATE_TIME_LDFLAGS $BOOST_THREAD_LDFLAGS"
-BOOST_FIND_LIB([wave], [$1],
- [boost/wave.hpp],
- [boost::wave::token_id id; get_token_name(id);])
-LIBS=$boost_wave_save_LIBS
-LDFLAGS=$boost_wave_save_LDFLAGS
-])# BOOST_WAVE
-
-
-# BOOST_XPRESSIVE()
-# -----------------
-# Look for Boost.Xpressive (new since 1.36.0).
-AC_DEFUN([BOOST_XPRESSIVE],
-[BOOST_FIND_HEADER([boost/xpressive/xpressive.hpp])])
-
-
-# ----------------- #
-# Internal helpers. #
-# ----------------- #
-
-
-# _BOOST_PTHREAD_FLAG()
-# ---------------------
-# Internal helper for BOOST_THREADS. Based on ACX_PTHREAD:
-# http://autoconf-archive.cryp.to/acx_pthread.html
-AC_DEFUN([_BOOST_PTHREAD_FLAG],
-[AC_REQUIRE([AC_PROG_CXX])dnl
-AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_LANG_PUSH([C++])dnl
-AC_CACHE_CHECK([for the flags needed to use pthreads], [boost_cv_pthread_flag],
-[ boost_cv_pthread_flag=
- # The ordering *is* (sometimes) important. Some notes on the
- # individual items follow:
- # (none): in case threads are in libc; should be tried before -Kthread and
- # other compiler flags to prevent continual compiler warnings
- # -lpthreads: AIX (must check this before -lpthread)
- # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
- # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
- # -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
- # -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
- # -pthreads: Solaris/GCC
- # -mthreads: MinGW32/GCC, Lynx/GCC
- # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
- # doesn't hurt to check since this sometimes defines pthreads too;
- # also defines -D_REENTRANT)
- # ... -mt is also the pthreads flag for HP/aCC
- # -lpthread: GNU Linux, etc.
- # --thread-safe: KAI C++
- case $host_os in #(
- *solaris*)
- # On Solaris (at least, for some versions), libc contains stubbed
- # (non-functional) versions of the pthreads routines, so link-based
- # tests will erroneously succeed. (We need to link with -pthreads/-mt/
- # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
- # a function called by this macro, so we could check for that, but
- # who knows whether they'll stub that too in a future libc.) So,
- # we'll just look for -pthreads and -lpthread first:
- boost_pthread_flags="-pthreads -lpthread -mt -pthread";; #(
- *)
- boost_pthread_flags="-lpthreads -Kthread -kthread -llthread -pthread \
- -pthreads -mthreads -lpthread --thread-safe -mt";;
- esac
- # Generate the test file.
- AC_LANG_CONFTEST([AC_LANG_PROGRAM([#include <pthread.h>],
- [pthread_t th; pthread_join(th, 0);
- pthread_attr_init(0); pthread_cleanup_push(0, 0);
- pthread_create(0,0,0,0); pthread_cleanup_pop(0);])])
- for boost_pthread_flag in '' $boost_pthread_flags; do
- boost_pthread_ok=false
-dnl Re-use the test file already generated.
- boost_pthreads__save_LIBS=$LIBS
- LIBS="$LIBS $boost_pthread_flag"
- AC_LINK_IFELSE([],
- [if grep ".*$boost_pthread_flag" conftest.err; then
- echo "This flag seems to have triggered warnings" >&AS_MESSAGE_LOG_FD
- else
- boost_pthread_ok=:; boost_cv_pthread_flag=$boost_pthread_flag
- fi])
- LIBS=$boost_pthreads__save_LIBS
- $boost_pthread_ok && break
- done
-])
-AC_LANG_POP([C++])dnl
-])# _BOOST_PTHREAD_FLAG
-
-
-# _BOOST_gcc_test(MAJOR, MINOR)
-# -----------------------------
-# Internal helper for _BOOST_FIND_COMPILER_TAG.
-m4_define([_BOOST_gcc_test],
-["defined __GNUC__ && __GNUC__ == $1 && __GNUC_MINOR__ == $2 && !defined __ICC @ gcc$1$2"])dnl
-
-
-# _BOOST_FIND_COMPILER_TAG()
-# --------------------------
-# Internal. When Boost is installed without --layout=system, each library
-# filename will hold a suffix that encodes the compiler used during the
-# build. The Boost build system seems to call this a `tag'.
-AC_DEFUN([_BOOST_FIND_COMPILER_TAG],
-[AC_REQUIRE([AC_PROG_CXX])dnl
-AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_CACHE_CHECK([for the toolset name used by Boost for $CXX], [boost_cv_lib_tag],
-[AC_LANG_PUSH([C++])dnl
- boost_cv_lib_tag=unknown
- # The following tests are mostly inspired by boost/config/auto_link.hpp
- # The list is sorted to most recent/common to oldest compiler (in order
- # to increase the likelihood of finding the right compiler with the
-  # least number of compilation attempts).
-  # Beware that some tests are sensitive to the order (for instance, we must
- # look for MinGW before looking for GCC3).
- # I used one compilation test per compiler with a #error to recognize
- # each compiler so that it works even when cross-compiling (let me know
- # if you know a better approach).
- # Known missing tags (known from Boost's tools/build/v2/tools/common.jam):
- # como, edg, kcc, bck, mp, sw, tru, xlc
- # I'm not sure about my test for `il' (be careful: Intel's ICC pre-defines
- # the same defines as GCC's).
- # TODO: Move the test on GCC 4.4 up once it's released.
- for i in \
- _BOOST_gcc_test(4, 3) \
- _BOOST_gcc_test(4, 2) \
- _BOOST_gcc_test(4, 1) \
- _BOOST_gcc_test(4, 0) \
- "defined __GNUC__ && __GNUC__ == 3 && !defined __ICC \
- && (defined WIN32 || defined WINNT || defined _WIN32 || defined __WIN32 \
- || defined __WIN32__ || defined __WINNT || defined __WINNT__) @ mgw" \
- _BOOST_gcc_test(3, 4) \
- _BOOST_gcc_test(3, 3) \
- "defined _MSC_VER && _MSC_VER >= 1500 @ vc90" \
- "defined _MSC_VER && _MSC_VER == 1400 @ vc80" \
- _BOOST_gcc_test(3, 2) \
- "defined _MSC_VER && _MSC_VER == 1310 @ vc71" \
- _BOOST_gcc_test(3, 1) \
- _BOOST_gcc_test(3, 0) \
- "defined __BORLANDC__ @ bcb" \
- "defined __ICC && (defined __unix || defined __unix__) @ il" \
- "defined __ICL @ iw" \
- "defined _MSC_VER && _MSC_VER == 1300 @ vc7" \
- _BOOST_gcc_test(4, 4) \
- _BOOST_gcc_test(2, 95) \
- "defined __MWERKS__ && __MWERKS__ <= 0x32FF @ cw9" \
- "defined _MSC_VER && _MSC_VER < 1300 && !defined UNDER_CE @ vc6" \
- "defined _MSC_VER && _MSC_VER < 1300 && defined UNDER_CE @ evc4" \
- "defined __MWERKS__ && __MWERKS__ <= 0x31FF @ cw8"
- do
- boost_tag_test=`expr "X$i" : 'X\([[^@]]*\) @ '`
- boost_tag=`expr "X$i" : 'X[[^@]]* @ \(.*\)'`
- AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#if $boost_tag_test
-/* OK */
-#else
-# error $boost_tag_test
-#endif
-]])], [boost_cv_lib_tag=$boost_tag; break], [])
- done
-AC_LANG_POP([C++])dnl
- case $boost_cv_lib_tag in #(
- # Some newer (>= 1.35?) versions of Boost seem to only use "gcc" as opposed
- # to "gcc41" for instance.
- *-gcc | *'-gcc ') :;; #( Don't re-add -gcc: it's already in there.
- gcc*)
- boost_tag_x=
- case $host_os in #(
- darwin*)
- if test $boost_major_version -ge 136; then
- # The `x' added in r46793 of Boost.
- boost_tag_x=x
- fi;;
- esac
- # We can specify multiple tags in this variable because it's used by
- # BOOST_FIND_LIB that does a `for tag in -$boost_cv_lib_tag' ...
- boost_cv_lib_tag="$boost_tag_x$boost_cv_lib_tag -${boost_tag_x}gcc"
- ;; #(
- unknown)
- AC_MSG_WARN([[could not figure out which toolset name to use for $CXX]])
- boost_cv_lib_tag=
- ;;
- esac
-])dnl end of AC_CACHE_CHECK
-])# _BOOST_FIND_COMPILER_TAG
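As an illustration of what the computed tag ends up matching (the filename below is a made-up but typical example, not taken from this patch): a Boost built without --layout=system by g++ 4.3 installs libraries named along the lines of

  #   libboost_regex-gcc43-mt-1_36.so
  #   (library name, toolset tag gcc43, multi-threaded -mt, Boost version 1_36)
  # which is why boost_cv_lib_tag is later combined with -mt and the header
  # version when BOOST_FIND_LIB enumerates candidate names.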
-
-
-# _BOOST_GUESS_WHETHER_TO_USE_MT()
-# --------------------------------
-# Compile a small test to try to guess whether we should favor MT (Multi
-# Thread) flavors of Boost. Sets boost_guess_use_mt accordingly.
-AC_DEFUN([_BOOST_GUESS_WHETHER_TO_USE_MT],
-[# Check whether we'd do better to use `mt' even though we weren't asked to.
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#if defined _REENTRANT || defined _MT || defined __MT__
-/* use -mt */
-#else
-# error MT not needed
-#endif
-]])], [boost_guess_use_mt=:], [boost_guess_use_mt=false])
-])
-
-# _BOOST_AC_LINK_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
-# -------------------------------------------------------------------
-# Fork of _AC_LINK_IFELSE that preserves conftest.o across calls. Fragile,
-# will break when Autoconf changes its internals. Requires that you manually
-# rm -f conftest.$ac_objext in between two really different tests, otherwise
-# you will try to link a conftest.o left behind by a previous test.
-# Used to aggressively optimize BOOST_FIND_LIB (see the big comment in this
-# macro).
-#
-# Don't use "break" in the actions, as it would short-circuit some code
-# this macro runs after the actions.
-m4_define([_BOOST_AC_LINK_IFELSE],
-[m4_ifvaln([$1], [AC_LANG_CONFTEST([$1])])dnl
-rm -f conftest$ac_exeext
-boost_save_ac_ext=$ac_ext
-boost_use_source=:
-# If we already have a .o, re-use it. We change $ac_ext so that $ac_link
-# tries to link the existing object file instead of compiling from source.
-test -f conftest.$ac_objext && ac_ext=$ac_objext && boost_use_source=false &&
- _AS_ECHO_LOG([re-using the existing conftest.$ac_objext])
-AS_IF([_AC_DO_STDERR($ac_link) && {
- test -z "$ac_[]_AC_LANG_ABBREV[]_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest$ac_exeext && {
- test "$cross_compiling" = yes ||
- $as_executable_p conftest$ac_exeext
-dnl FIXME: use AS_TEST_X instead when 2.61 is widespread enough.
- }],
- [$2],
- [if $boost_use_source; then
- _AC_MSG_LOG_CONFTEST
- fi
- $3])
-ac_objext=$boost_save_ac_objext
-ac_ext=$boost_save_ac_ext
-dnl Delete also the IPA/IPO (Inter Procedural Analysis/Optimization)
-dnl information created by the PGI compiler (conftest_ipa8_conftest.oo),
-dnl as it would interfere with the next link command.
-rm -f core conftest.err conftest_ipa8_conftest.oo \
- conftest$ac_exeext m4_ifval([$1], [conftest.$ac_ext])[]dnl
-])# _BOOST_AC_LINK_IFELSE
-
-# Local Variables:
-# mode: autoconf
-# End:
diff --git a/mert/BleuScorer.h b/mert/BleuScorer.h
index 7e0e18b53..a10b09a7a 100644
--- a/mert/BleuScorer.h
+++ b/mert/BleuScorer.h
@@ -56,7 +56,7 @@ private:
typedef map<vector<int>,int,CompareNgrams> counts_t;
typedef map<vector<int>,int,CompareNgrams>::iterator counts_iterator;
- typedef map<vector<int>,int,CompareNgrams>::iterator counts_const_iterator;
+ typedef map<vector<int>,int,CompareNgrams>::const_iterator counts_const_iterator;
typedef ScopedVector<counts_t> refcounts_t;
/**
diff --git a/mert/Data.cpp b/mert/Data.cpp
index ed1100106..23fdc6d82 100644
--- a/mert/Data.cpp
+++ b/mert/Data.cpp
@@ -7,7 +7,7 @@
*/
#include <algorithm>
-#include <cassert>
+#include "util/check.hh"
#include <cmath>
#include <fstream>
@@ -149,12 +149,12 @@ void Data::mergeSparseFeatures() {
void Data::createShards(size_t shard_count, float shard_size, const string& scorerconfig,
std::vector<Data>& shards)
{
- assert(shard_count);
- assert(shard_size >= 0);
- assert(shard_size <= 1);
+ CHECK(shard_count);
+ CHECK(shard_size >= 0);
+ CHECK(shard_size <= 1);
size_t data_size = scoredata->size();
- assert(data_size == featdata->size());
+ CHECK(data_size == featdata->size());
shard_size *= data_size;
diff --git a/mert/FeatureDataIterator.h b/mert/FeatureDataIterator.h
index 6df249822..81f072970 100644
--- a/mert/FeatureDataIterator.h
+++ b/mert/FeatureDataIterator.h
@@ -41,7 +41,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
class FileFormatException : public util::Exception
{
public:
- explicit FileFormatException(const std::string filename, const std::string& line) {
+ explicit FileFormatException(const std::string& filename, const std::string& line) {
*this << "Error in line \"" << line << "\" of " << filename;
}
};
@@ -68,7 +68,7 @@ class FeatureDataIterator :
{
public:
FeatureDataIterator();
- FeatureDataIterator(const std::string& filename);
+ explicit FeatureDataIterator(const std::string& filename);
static FeatureDataIterator end() {
return FeatureDataIterator();
@@ -89,5 +89,3 @@ class FeatureDataIterator :
};
#endif
-
-
diff --git a/mert/Jamfile b/mert/Jamfile
new file mode 100644
index 000000000..e095f9577
--- /dev/null
+++ b/mert/Jamfile
@@ -0,0 +1,44 @@
+lib m ;
+
+lib mert_lib :
+Util.cpp
+FileStream.cpp
+Timer.cpp
+ScoreStats.cpp ScoreArray.cpp ScoreData.cpp
+ScoreDataIterator.cpp
+FeatureStats.cpp FeatureArray.cpp FeatureData.cpp
+FeatureDataIterator.cpp
+Data.cpp
+BleuScorer.cpp
+Point.cpp
+PerScorer.cpp
+Scorer.cpp
+ScorerFactory.cpp
+Optimizer.cpp
+TERsrc/alignmentStruct.cpp
+TERsrc/hashMap.cpp
+TERsrc/hashMapStringInfos.cpp
+TERsrc/stringHasher.cpp
+TERsrc/terAlignment.cpp
+TERsrc/terShift.cpp
+TERsrc/hashMapInfos.cpp
+TERsrc/infosHasher.cpp
+TERsrc/stringInfosHasher.cpp
+TERsrc/tercalc.cpp
+TERsrc/tools.cpp
+TerScorer.cpp
+CderScorer.cpp
+MergeScorer.cpp
+../util//kenutil m ..//z ;
+
+exe mert : mert.cpp mert_lib ../moses/src//ThreadPool ;
+
+exe extractor : extractor.cpp mert_lib ;
+
+exe evaluator : evaluator.cpp mert_lib ;
+
+exe pro : pro.cpp mert_lib ..//boost_program_options ;
+
+alias programs : mert extractor evaluator pro ;
+
+install legacy : programs : <location>. ;
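A usage note, not part of the patch: with this Jamfile the mert tools are built through Boost.Build rather than automake. The exact targets depend on the top-level Jamroot, but a typical session from the repository root would look roughly like the sketch below (the per-directory target form is an assumption):

  #   bjam             # build everything declared by the Jamroot, including mert
  #   bjam mert        # assumed form for restricting the build to this directory
  # The final rule, "install legacy : programs : <location>. ;", copies the mert,
  # extractor, evaluator and pro binaries into the mert/ directory itself.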
diff --git a/mert/Makefile.am b/mert/Makefile.am
deleted file mode 100644
index 0b16a2f4f..000000000
--- a/mert/Makefile.am
+++ /dev/null
@@ -1,45 +0,0 @@
-lib_LTLIBRARIES = libmert.la
-bin_PROGRAMS = mert extractor evaluator pro
-AM_CPPFLAGS = -W -Wall -Wno-unused -ffor-scope -DTRACE_ENABLE $(BOOST_CPPFLAGS)
-
-libmert_la_SOURCES = \
-Util.cpp \
-FileStream.cpp \
-Timer.cpp \
-ScoreStats.cpp ScoreArray.cpp ScoreData.cpp \
-ScoreDataIterator.cpp \
-FeatureStats.cpp FeatureArray.cpp FeatureData.cpp \
-FeatureDataIterator.cpp \
-Data.cpp \
-BleuScorer.cpp \
-Point.cpp \
-PerScorer.cpp \
-Scorer.cpp \
-ScorerFactory.cpp \
-Optimizer.cpp \
-TERsrc/alignmentStruct.cpp \
-TERsrc/hashMap.cpp \
-TERsrc/hashMapStringInfos.cpp \
-TERsrc/stringHasher.cpp \
-TERsrc/terAlignment.cpp \
-TERsrc/terShift.cpp \
-TERsrc/hashMapInfos.cpp \
-TERsrc/infosHasher.cpp \
-TERsrc/stringInfosHasher.cpp \
-TERsrc/tercalc.cpp \
-TERsrc/tools.cpp \
-TerScorer.cpp \
-CderScorer.cpp \
-MergeScorer.cpp
-
-mert_SOURCES = mert.cpp $(top_builddir)/moses/src/ThreadPool.cpp
-extractor_SOURCES = extractor.cpp
-evaluator_SOURCES = evaluator.cpp
-pro_SOURCES = pro.cpp
-
-extractor_LDADD = libmert.la -lm -lz
-mert_LDADD = libmert.la -lm -lz $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-evaluator_LDADD = libmert.la -lm -lz
-pro_LDADD = libmert.la $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_LDFLAGS) $(BOOST_PROGRAM_OPTIONS_LDFLAGS) $(BOOST_PROGRAM_OPTIONS_LIBS)
-pro_DEPENDENCIES = $(top_srcdir)/kenlm/libkenlm.la libmert.la
-
diff --git a/mert/Optimizer.cpp b/mert/Optimizer.cpp
index cd9703ddd..73fabb9ad 100644
--- a/mert/Optimizer.cpp
+++ b/mert/Optimizer.cpp
@@ -1,7 +1,7 @@
#include "Optimizer.h"
#include <cmath>
-#include <cassert>
+#include "util/check.hh"
#include <vector>
#include <limits>
#include <map>
@@ -49,7 +49,7 @@ Optimizer::Optimizer(unsigned Pd, vector<unsigned> i2O, vector<parameter_t> star
// Warning: the init vector is a full set of parameters, of dimension pdim!
Point::pdim = Pd;
- assert(start.size() == Pd);
+ CHECK(start.size() == Pd);
Point::dim = i2O.size();
Point::optindices = i2O;
if (Point::pdim > Point::dim) {
@@ -90,7 +90,7 @@ map<float,diff_t >::iterator AddThreshold(map<float,diff_t >& thresholdmap, floa
} else {
// normal case
pair<map<float,diff_t>::iterator, bool> ins = thresholdmap.insert(threshold(newt, diff_t(1, newdiff)));
- assert(ins.second); // we really inserted something
+ CHECK(ins.second); // we really inserted something
it = ins.first;
}
return it;
@@ -174,7 +174,7 @@ statscore_t Optimizer::LineOptimize(const Point& origin, const Point& direction,
// The rightmost bestindex is the one with the highest slope.
// They should be equal but there might be.
- assert(abs(leftmost->first-gradient.rbegin()->first) < 0.0001);
+ CHECK(abs(leftmost->first-gradient.rbegin()->first) < 0.0001);
// A small difference due to rounding error
break;
}
@@ -195,7 +195,7 @@ statscore_t Optimizer::LineOptimize(const Point& origin, const Point& direction,
map<float,diff_t>::iterator tit = thresholdmap.find(leftmostx);
if (tit == previnserted) {
// The threshold is the same as before can happen if 2 candidates are the same for example.
- assert(previnserted->second.back().first == newd.first);
+ CHECK(previnserted->second.back().first == newd.first);
previnserted->second.back()=newd; // just replace the 1 best for sentence S
// previnsert doesn't change
} else {
@@ -209,14 +209,14 @@ statscore_t Optimizer::LineOptimize(const Point& origin, const Point& direction,
} else {
// We append the diffs in previnsert to tit before destroying previnsert.
tit->second.insert(tit->second.end(),previnserted->second.begin(),previnserted->second.end());
- assert(tit->second.back().first == newd.first);
+ CHECK(tit->second.back().first == newd.first);
tit->second.back()=newd; // change diff for sentence S
thresholdmap.erase(previnserted); // erase old previnsert
previnserted = tit; // point previnsert to the new threshold
}
}
- assert(previnserted != thresholdmap.end());
+ CHECK(previnserted != thresholdmap.end());
} else { //normal insertion process
previnserted = AddThreshold(thresholdmap, leftmostx, newd);
}
@@ -252,7 +252,7 @@ statscore_t Optimizer::LineOptimize(const Point& origin, const Point& direction,
float bestx = MIN_FLOAT;
// We skipped the first el of thresholdlist but GetIncStatScore return 1 more for first1best.
- assert(scores.size() == thresholdmap.size());
+ CHECK(scores.size() == thresholdmap.size());
for (unsigned int sc = 0; sc != scores.size(); sc++) {
//cerr << "x=" << thrit->first << " => " << scores[sc] << endl;
if (scores[sc] > bestscore) {
@@ -309,7 +309,7 @@ statscore_t Optimizer::LineOptimize(const Point& origin, const Point& direction,
void Optimizer::Get1bests(const Point& P, vector<unsigned>& bests) const
{
- assert(FData);
+ CHECK(FData);
bests.clear();
bests.resize(size());
@@ -362,7 +362,7 @@ statscore_t Optimizer::Run(Point& P) const
vector<statscore_t> Optimizer::GetIncStatScore(vector<unsigned> thefirst, vector<vector <pair<unsigned,unsigned> > > thediffs) const
{
- assert(scorer);
+ CHECK(scorer);
vector<statscore_t> theres;
diff --git a/mert/Point.cpp b/mert/Point.cpp
index 71338966a..322f10f77 100644
--- a/mert/Point.cpp
+++ b/mert/Point.cpp
@@ -2,7 +2,7 @@
#include <cmath>
#include <cstdlib>
-#include <cassert>
+#include "util/check.hh"
#include <limits>
#include "FeatureStats.h"
@@ -37,7 +37,7 @@ Point::Point(const vector<parameter_t>& init,
m_max[i] = max[i];
}
} else {
- assert(init.size()==pdim);
+ CHECK(init.size()==pdim);
for (unsigned int i=0; i<Point::dim; i++) {
operator[](i)=init[optindices[i]];
m_min[i] = min[optindices[i]];
@@ -50,8 +50,8 @@ Point::~Point() {}
void Point::Randomize()
{
- assert(m_min.size()==Point::dim);
- assert(m_max.size()==Point::dim);
+ CHECK(m_min.size()==Point::dim);
+ CHECK(m_max.size()==Point::dim);
for (unsigned int i=0; i<size(); i++) {
operator[](i) = m_min[i] +
(float)random()/(float)RAND_MAX * (float)(m_max[i]-m_min[i]);
@@ -76,7 +76,7 @@ double Point::operator*(const FeatureStats& F) const
Point Point::operator+(const Point& p2) const
{
- assert(p2.size() == size());
+ CHECK(p2.size() == size());
Point Res(*this);
for (unsigned i = 0; i < size(); i++) {
Res[i] += p2[i];
@@ -88,7 +88,7 @@ Point Point::operator+(const Point& p2) const
void Point::operator+=(const Point& p2)
{
- assert(p2.size() == size());
+ CHECK(p2.size() == size());
for (unsigned i = 0; i < size(); i++) {
operator[](i) += p2[i];
}
diff --git a/mert/ScoreDataIterator.h b/mert/ScoreDataIterator.h
index 3248a7a78..4633b8651 100644
--- a/mert/ScoreDataIterator.h
+++ b/mert/ScoreDataIterator.h
@@ -43,7 +43,7 @@ class ScoreDataIterator :
{
public:
ScoreDataIterator();
- ScoreDataIterator(const std::string& filename);
+ explicit ScoreDataIterator(const std::string& filename);
static ScoreDataIterator end() {
return ScoreDataIterator();
@@ -62,6 +62,4 @@ class ScoreDataIterator :
std::vector<ScoreDataItem> m_next;
};
-
#endif
-
diff --git a/mert/normalise.py b/mert/normalise.py
deleted file mode 100755
index 5099858b9..000000000
--- a/mert/normalise.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/python
-
-#
-# Normalise the references or nbest list, prior to statistic and feature extraction
-#
-
-import optparse,sys, math, re, xml.sax.saxutils
-
-preserve_case = True
-
-normalize1 = [
- ('<skipped>', ''), # strip "skipped" tags
- (r'-\n', ''), # strip end-of-line hyphenation and join lines
- (r'\n', ' '), # join lines
-# (r'(\d)\s+(?=\d)', r'\1'), # join digits
-]
-normalize1 = [(re.compile(pattern), replace) for (pattern, replace) in normalize1]
-
-normalize2 = [
- (r'([\{-\~\[-\` -\&\(-\+\:-\@\/])',r' \1 '), # tokenize punctuation. apostrophe is missing
- (r'([^0-9])([\.,])',r'\1 \2 '), # tokenize period and comma unless preceded by a digit
- (r'([\.,])([^0-9])',r' \1 \2'), # tokenize period and comma unless followed by a digit
- (r'([0-9])(-)',r'\1 \2 ') # tokenize dash when preceded by a digit
-]
-normalize2 = [(re.compile(pattern), replace) for (pattern, replace) in normalize2]
-
-def normalize(s):
- '''Normalize and tokenize text. This is lifted from NIST mteval-v11a.pl.'''
- # Added to bypass NIST-style pre-processing of hyp and ref files -- wade
- if type(s) is not str:
- s = " ".join(s)
- # language-independent part:
- for (pattern, replace) in normalize1:
- s = re.sub(pattern, replace, s)
- s = xml.sax.saxutils.unescape(s, {'&quot;':'"'})
- # language-dependent part (assuming Western languages):
- s = " %s " % s
- if not preserve_case:
- s = s.lower() # this might not be identical to the original
- for (pattern, replace) in normalize2:
- s = re.sub(pattern, replace, s)
- return s.split()
-
-def process_nbest():
- print>>sys.stderr, "Processing nbest file"
- for line in sys.stdin:
- sep = "||| "
- fields = line[:-1].split(sep)
- normalised = normalize(fields[1])
- fields[1] = " ".join(normalised) + " "
- print>>sys.stdout,sep.join(fields)
-
-
-def process_refs():
- print>>sys.stderr, "Processing text file"
- for line in sys.stdin:
- normalised = normalize(line[:-1])
- print>>sys.stdout,(" ".join(normalised))
-
-def main():
- parser = optparse.OptionParser(usage="usage: %prog [options] < input > output")
- parser.add_option("-n","--nbest",action="store_true",default=False,dest="nbest",
- help="Process nbest file")
- (options,args) = parser.parse_args()
- if options.nbest:
- process_nbest()
- else:
- process_refs()
-
-if __name__ == "__main__":
- main()
-
diff --git a/mert/test_scorer.py b/mert/test_scorer.py
deleted file mode 100644
index 9271753f0..000000000
--- a/mert/test_scorer.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/python
-
-#
-# Calculate bleu score for test files using old (python) script
-#
-
-import os.path
-import sys
-
-
-def main():
- sys.path.append("../scripts/training/cmert-0.5")
- import bleu
- data_dir = "test_scorer_data"
- nbest_file = os.path.join(data_dir,"nbest.out")
- ref_file = os.path.join(data_dir,"reference.txt")
- bleu.preserve_case = False
- bleu.eff_ref_len = "shortest"
- bleu.nonorm = 0
-
- ref_fh = open(ref_file)
- cookedrefs = []
- for ref in ref_fh:
- cookedref = bleu.cook_refs([ref])
- cookedrefs.append(cookedref)
- ref_fh.close()
-
- nbest_fh = open(nbest_file)
- tests = []
- i = -1
- for line in nbest_fh:
- fields = line.split("||| ")
- current_i = int(fields[0])
- text = fields[1]
- if i != current_i:
- tests.append([])
- i = current_i
- tests[-1].append(text)
- nbest_fh.close()
-
- # score with first best
- cookedtests = []
- for i in range(len(tests)):
- sentence = tests[i][0]
- cookedtest = (bleu.cook_test(sentence, cookedrefs[i]))
- stats = " ".join(["%d %d" % (c,g) for (c,g) in zip(cookedtest['correct'], cookedtest['guess'])])
- print " %s %d" % (stats ,cookedtest['reflen'])
- cookedtests.append(cookedtest)
- bleu1 = bleu.score_cooked(cookedtests)
-
- # vary, and score again
- cookedtests = []
- for i in range(len(tests)):
- sentence = tests[i][0]
- if i == 7:
- sentence = tests[i][8]
- elif i == 1:
-            sentence = tests[i][2]
- cookedtest = (bleu.cook_test(sentence, cookedrefs[i]))
- cookedtests.append(cookedtest)
- bleu2 = bleu.score_cooked(cookedtests)
-
-
- print "Bleus: ", bleu1,bleu2
-
-if __name__ == "__main__":
- main()
diff --git a/misc/GenerateTuples.cpp b/misc/GenerateTuples.cpp
index e46e87e06..58e9697fd 100644
--- a/misc/GenerateTuples.cpp
+++ b/misc/GenerateTuples.cpp
@@ -110,7 +110,7 @@ struct GCData {
const std::vector<std::vector<float> >& b)
: pdicts(a),weights(b),totalTuples(0),distinctTuples(0) {
- assert(pdicts.size()==weights.size());
+ CHECK(pdicts.size()==weights.size());
std::set<FactorType> distinctOutFset;
inF.resize(pdicts.size());
outF.resize(pdicts.size());
@@ -152,7 +152,7 @@ void GeneratePerFactorTgtList(size_t factorType,PPtr pptr,GCData& data,Len2Cands
data.pdicts[factorType]->GetTargetCandidates(pptr,cands);
for(std::vector<FactorTgtCand>::const_iterator cand=cands.begin(); cand!=cands.end(); ++cand) {
- assert(data.weights[factorType].size()==cand->second.size());
+ CHECK(data.weights[factorType].size()==cand->second.size());
float costs=std::inner_product(data.weights[factorType].begin(),
data.weights[factorType].end(),
cand->second.begin(),
@@ -176,7 +176,7 @@ void GenerateTupleTgtCands(OutputFactor2TgtCandList& tCand,E2Costs& e2costs,GCDa
if(gotCands) {
// enumerate tuples
- assert(data.DistinctOutFactors()==tCand.size());
+ CHECK(data.DistinctOutFactors()==tCand.size());
std::vector<unsigned> radix(data.DistinctOutFactors());
for(size_t i=0; i<tCand.size(); ++i) radix[i]=tCand[i].size();
@@ -189,7 +189,7 @@ void GenerateTupleTgtCands(OutputFactor2TgtCandList& tCand,E2Costs& e2costs,GCDa
mPhrase e(radix.size());
float costs=0.0;
for(size_t j=0; j<radix.size(); ++j) {
- assert(tuples[radix.size()*i+j]<tCand[j].size());
+ CHECK(tuples[radix.size()*i+j]<tCand[j].size());
std::pair<float,vFactor> const& mycand=tCand[j][tuples[radix.size()*i+j]];
e[j]=mycand.second;
costs+=mycand.first;
@@ -198,7 +198,7 @@ void GenerateTupleTgtCands(OutputFactor2TgtCandList& tCand,E2Costs& e2costs,GCDa
bool mismatch=0;
for(size_t j=1; !mismatch && j<e.size(); ++j)
if(e[j].size()!=e[j-1].size()) mismatch=1;
- assert(mismatch==0);
+ CHECK(mismatch==0);
#endif
std::pair<E2Costs::iterator,bool> p=e2costs.insert(std::make_pair(e,costs));
if(p.second) ++data.distinctTuples;
@@ -244,7 +244,7 @@ void GenerateCandidates(const ConfusionNet& src,
//std::cerr<<"processing state "<<curr<<" stack size: "<<stack.size()<<"\n";
- assert(curr.end()<src.GetSize());
+ CHECK(curr.end()<src.GetSize());
const ConfusionNet::Column &currCol=src[curr.end()];
for(size_t colidx=0; colidx<currCol.size(); ++colidx) {
const Word& w=currCol[colidx].first;
diff --git a/misc/Jamfile b/misc/Jamfile
new file mode 100644
index 000000000..e8a133367
--- /dev/null
+++ b/misc/Jamfile
@@ -0,0 +1,9 @@
+exe processPhraseTable : GenerateTuples.cpp processPhraseTable.cpp ../moses/src//moses ;
+
+exe processLexicalTable : processLexicalTable.cpp ../moses/src//moses ;
+
+exe queryPhraseTable : queryPhraseTable.cpp ../moses/src//moses ;
+
+exe queryLexicalTable : queryLexicalTable.cpp ../moses/src//moses ;
+
+alias programs : processPhraseTable processLexicalTable queryPhraseTable queryLexicalTable ;
diff --git a/misc/Makefile.am b/misc/Makefile.am
deleted file mode 100644
index 729ad2e0b..000000000
--- a/misc/Makefile.am
+++ /dev/null
@@ -1,16 +0,0 @@
-bin_PROGRAMS = processPhraseTable processLexicalTable queryLexicalTable queryPhraseTable
-
-processPhraseTable_SOURCES = GenerateTuples.cpp processPhraseTable.cpp
-processLexicalTable_SOURCES = processLexicalTable.cpp
-queryLexicalTable_SOURCES = queryLexicalTable.cpp
-queryPhraseTable_SOURCES = queryPhraseTable.cpp
-
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES -I$(top_srcdir)/moses/src $(BOOST_CPPFLAGS)
-
-processPhraseTable_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/moses/src -L$(top_srcdir)/OnDiskPt/src -lmoses -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-
-processLexicalTable_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/moses/src -L$(top_srcdir)/OnDiskPt/src -lmoses -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-
-queryLexicalTable_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/moses/src -L$(top_srcdir)/OnDiskPt/src -lmoses -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-
-queryPhraseTable_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/moses/src -L$(top_srcdir)/OnDiskPt/src -lmoses -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
diff --git a/misc/queryLexicalTable.cpp b/misc/queryLexicalTable.cpp
index ef6e741c1..166549267 100644
--- a/misc/queryLexicalTable.cpp
+++ b/misc/queryLexicalTable.cpp
@@ -86,7 +86,7 @@ int main(int argc, char** argv)
if(use_context) {
c_mask.push_back(0);
}
- Phrase e(Output, 0),f(Input, 0),c(Output, 0);
+ Phrase e( 0),f(0),c(0);
e.CreateFromString(e_mask, query_e, "|");
f.CreateFromString(f_mask, query_f, "|");
c.CreateFromString(c_mask, query_c,"|");
diff --git a/missing b/missing
deleted file mode 100755
index 53fa1e5c7..000000000
--- a/missing
+++ /dev/null
@@ -1,337 +0,0 @@
-#! /bin/sh
-# Common stub for a few missing GNU programs while installing.
-# Copyright (C) 1996, 1997, 1999, 2000, 2002 Free Software Foundation, Inc.
-# Originally by Fran,cois Pinard <pinard@iro.umontreal.ca>, 1996.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2, or (at your option)
-# any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
-# 02111-1307, USA.
-
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-if test $# -eq 0; then
- echo 1>&2 "Try \`$0 --help' for more information"
- exit 1
-fi
-
-run=:
-
-# In the cases where this matters, `missing' is being run in the
-# srcdir already.
-if test -f configure.ac; then
- configure_ac=configure.ac
-else
- configure_ac=configure.in
-fi
-
-case "$1" in
---run)
- # Try to run requested program, and just exit if it succeeds.
- run=
- shift
- "$@" && exit 0
- ;;
-esac
-
-# If it does not exist, or fails to run (possibly an outdated version),
-# try to emulate it.
-case "$1" in
-
- -h|--h|--he|--hel|--help)
- echo "\
-$0 [OPTION]... PROGRAM [ARGUMENT]...
-
-Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an
-error status if there is no known handling for PROGRAM.
-
-Options:
- -h, --help display this help and exit
- -v, --version output version information and exit
- --run try to run the given command, and emulate it if it fails
-
-Supported PROGRAM values:
- aclocal touch file \`aclocal.m4'
- autoconf touch file \`configure'
- autoheader touch file \`config.h.in'
- automake touch all \`Makefile.in' files
- bison create \`y.tab.[ch]', if possible, from existing .[ch]
- flex create \`lex.yy.c', if possible, from existing .c
- help2man touch the output file
- lex create \`lex.yy.c', if possible, from existing .c
- makeinfo touch the output file
- tar try tar, gnutar, gtar, then tar without non-portable flags
- yacc create \`y.tab.[ch]', if possible, from existing .[ch]"
- ;;
-
- -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
- echo "missing 0.4 - GNU automake"
- ;;
-
- -*)
- echo 1>&2 "$0: Unknown \`$1' option"
- echo 1>&2 "Try \`$0 --help' for more information"
- exit 1
- ;;
-
- aclocal*)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified \`acinclude.m4' or \`${configure_ac}'. You might want
- to install the \`Automake' and \`Perl' packages. Grab them from
- any GNU archive site."
- touch aclocal.m4
- ;;
-
- autoconf)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified \`${configure_ac}'. You might want to install the
- \`Autoconf' and \`GNU m4' packages. Grab them from any GNU
- archive site."
- touch configure
- ;;
-
- autoheader)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified \`acconfig.h' or \`${configure_ac}'. You might want
- to install the \`Autoconf' and \`GNU m4' packages. Grab them
- from any GNU archive site."
- files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}`
- test -z "$files" && files="config.h"
- touch_files=
- for f in $files; do
- case "$f" in
- *:*) touch_files="$touch_files "`echo "$f" |
- sed -e 's/^[^:]*://' -e 's/:.*//'`;;
- *) touch_files="$touch_files $f.in";;
- esac
- done
- touch $touch_files
- ;;
-
- automake*)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'.
- You might want to install the \`Automake' and \`Perl' packages.
- Grab them from any GNU archive site."
- find . -type f -name Makefile.am -print |
- sed 's/\.am$/.in/' |
- while read f; do touch "$f"; done
- ;;
-
- autom4te)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is needed, and you do not seem to have it handy on your
- system. You might have modified some files without having the
- proper tools for further handling them.
- You can get \`$1Help2man' as part of \`Autoconf' from any GNU
- archive site."
-
- file=`echo "$*" | sed -n 's/.*--output[ =]*\([^ ]*\).*/\1/p'`
- test -z "$file" && file=`echo "$*" | sed -n 's/.*-o[ ]*\([^ ]*\).*/\1/p'`
- if test -f "$file"; then
- touch $file
- else
- test -z "$file" || exec >$file
- echo "#! /bin/sh"
- echo "# Created by GNU Automake missing as a replacement of"
- echo "# $ $@"
- echo "exit 0"
- chmod +x $file
- exit 1
- fi
- ;;
-
- bison|yacc)
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified a \`.y' file. You may need the \`Bison' package
- in order for those modifications to take effect. You can get
- \`Bison' from any GNU archive site."
- rm -f y.tab.c y.tab.h
- if [ $# -ne 1 ]; then
- eval LASTARG="\${$#}"
- case "$LASTARG" in
- *.y)
- SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'`
- if [ -f "$SRCFILE" ]; then
- cp "$SRCFILE" y.tab.c
- fi
- SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'`
- if [ -f "$SRCFILE" ]; then
- cp "$SRCFILE" y.tab.h
- fi
- ;;
- esac
- fi
- if [ ! -f y.tab.h ]; then
- echo >y.tab.h
- fi
- if [ ! -f y.tab.c ]; then
- echo 'main() { return 0; }' >y.tab.c
- fi
- ;;
-
- lex|flex)
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified a \`.l' file. You may need the \`Flex' package
- in order for those modifications to take effect. You can get
- \`Flex' from any GNU archive site."
- rm -f lex.yy.c
- if [ $# -ne 1 ]; then
- eval LASTARG="\${$#}"
- case "$LASTARG" in
- *.l)
- SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'`
- if [ -f "$SRCFILE" ]; then
- cp "$SRCFILE" lex.yy.c
- fi
- ;;
- esac
- fi
- if [ ! -f lex.yy.c ]; then
- echo 'main() { return 0; }' >lex.yy.c
- fi
- ;;
-
- help2man)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified a dependency of a manual page. You may need the
- \`Help2man' package in order for those modifications to take
- effect. You can get \`Help2man' from any GNU archive site."
-
- file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'`
- if test -z "$file"; then
- file=`echo "$*" | sed -n 's/.*--output=\([^ ]*\).*/\1/p'`
- fi
- if [ -f "$file" ]; then
- touch $file
- else
- test -z "$file" || exec >$file
- echo ".ab help2man is required to generate this page"
- exit 1
- fi
- ;;
-
- makeinfo)
- if test -z "$run" && (makeinfo --version) > /dev/null 2>&1; then
- # We have makeinfo, but it failed.
- exit 1
- fi
-
- echo 1>&2 "\
-WARNING: \`$1' is missing on your system. You should only need it if
- you modified a \`.texi' or \`.texinfo' file, or any other file
- indirectly affecting the aspect of the manual. The spurious
- call might also be the consequence of using a buggy \`make' (AIX,
- DU, IRIX). You might want to install the \`Texinfo' package or
- the \`GNU make' package. Grab either from any GNU archive site."
- file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'`
- if test -z "$file"; then
- file=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'`
- file=`sed -n '/^@setfilename/ { s/.* \([^ ]*\) *$/\1/; p; q; }' $file`
- fi
- touch $file
- ;;
-
- tar)
- shift
- if test -n "$run"; then
- echo 1>&2 "ERROR: \`tar' requires --run"
- exit 1
- fi
-
- # We have already tried tar in the generic part.
- # Look for gnutar/gtar before invocation to avoid ugly error
- # messages.
- if (gnutar --version > /dev/null 2>&1); then
- gnutar "$@" && exit 0
- fi
- if (gtar --version > /dev/null 2>&1); then
- gtar "$@" && exit 0
- fi
- firstarg="$1"
- if shift; then
- case "$firstarg" in
- *o*)
- firstarg=`echo "$firstarg" | sed s/o//`
- tar "$firstarg" "$@" && exit 0
- ;;
- esac
- case "$firstarg" in
- *h*)
- firstarg=`echo "$firstarg" | sed s/h//`
- tar "$firstarg" "$@" && exit 0
- ;;
- esac
- fi
-
- echo 1>&2 "\
-WARNING: I can't seem to be able to run \`tar' with the given arguments.
- You may want to install GNU tar or Free paxutils, or check the
- command line arguments."
- exit 1
- ;;
-
- *)
- echo 1>&2 "\
-WARNING: \`$1' is needed, and you do not seem to have it handy on your
- system. You might have modified some files without having the
- proper tools for further handling them. Check the \`README' file,
- it often tells you about the needed prerequirements for installing
- this package. You may also peek at any GNU archive site, in case
- some other package would contain this missing \`$1' program."
- exit 1
- ;;
-esac
-
-exit 0
-
diff --git a/mkinstalldirs b/mkinstalldirs
deleted file mode 100755
index d2d5f21b6..000000000
--- a/mkinstalldirs
+++ /dev/null
@@ -1,111 +0,0 @@
-#! /bin/sh
-# mkinstalldirs --- make directory hierarchy
-# Author: Noah Friedman <friedman@prep.ai.mit.edu>
-# Created: 1993-05-16
-# Public domain
-
-errstatus=0
-dirmode=""
-
-usage="\
-Usage: mkinstalldirs [-h] [--help] [-m mode] dir ..."
-
-# process command line arguments
-while test $# -gt 0 ; do
- case $1 in
- -h | --help | --h*) # -h for help
- echo "$usage" 1>&2
- exit 0
- ;;
- -m) # -m PERM arg
- shift
- test $# -eq 0 && { echo "$usage" 1>&2; exit 1; }
- dirmode=$1
- shift
- ;;
- --) # stop option processing
- shift
- break
- ;;
- -*) # unknown option
- echo "$usage" 1>&2
- exit 1
- ;;
- *) # first non-opt arg
- break
- ;;
- esac
-done
-
-for file
-do
- if test -d "$file"; then
- shift
- else
- break
- fi
-done
-
-case $# in
- 0) exit 0 ;;
-esac
-
-case $dirmode in
- '')
- if mkdir -p -- . 2>/dev/null; then
- echo "mkdir -p -- $*"
- exec mkdir -p -- "$@"
- fi
- ;;
- *)
- if mkdir -m "$dirmode" -p -- . 2>/dev/null; then
- echo "mkdir -m $dirmode -p -- $*"
- exec mkdir -m "$dirmode" -p -- "$@"
- fi
- ;;
-esac
-
-for file
-do
- set fnord `echo ":$file" | sed -ne 's/^:\//#/;s/^://;s/\// /g;s/^#/\//;p'`
- shift
-
- pathcomp=
- for d
- do
- pathcomp="$pathcomp$d"
- case $pathcomp in
- -*) pathcomp=./$pathcomp ;;
- esac
-
- if test ! -d "$pathcomp"; then
- echo "mkdir $pathcomp"
-
- mkdir "$pathcomp" || lasterr=$?
-
- if test ! -d "$pathcomp"; then
- errstatus=$lasterr
- else
- if test ! -z "$dirmode"; then
- echo "chmod $dirmode $pathcomp"
- lasterr=""
- chmod "$dirmode" "$pathcomp" || lasterr=$?
-
- if test ! -z "$lasterr"; then
- errstatus=$lasterr
- fi
- fi
- fi
- fi
-
- pathcomp="$pathcomp/"
- done
-done
-
-exit $errstatus
-
-# Local Variables:
-# mode: shell-script
-# sh-indentation: 2
-# End:
-# mkinstalldirs ends here
diff --git a/moses-chart-cmd/src/IOWrapper.cpp b/moses-chart-cmd/src/IOWrapper.cpp
index 7833b040b..cf90b877b 100644
--- a/moses-chart-cmd/src/IOWrapper.cpp
+++ b/moses-chart-cmd/src/IOWrapper.cpp
@@ -153,7 +153,7 @@ InputType*IOWrapper::GetInput(InputType* inputType)
*/
void OutputSurface(std::ostream &out, const Phrase &phrase, const std::vector<FactorType> &outputFactorOrder, bool reportAllFactors)
{
- assert(outputFactorOrder.size() > 0);
+ CHECK(outputFactorOrder.size() > 0);
if (reportAllFactors == true) {
out << phrase;
} else {
@@ -258,7 +258,7 @@ void IOWrapper::OutputDetailedTranslationReport(
}
std::ostringstream out;
OutputTranslationOptions(out, hypo, translationId);
- assert(m_detailOutputCollector);
+ CHECK(m_detailOutputCollector);
m_detailOutputCollector->Write(translationId, out.str());
}
@@ -280,11 +280,11 @@ void IOWrapper::OutputBestHypo(const ChartHypothesis *hypo, long translationId,
if (StaticData::Instance().IsPathRecoveryEnabled()) {
out << "||| ";
}
- Phrase outPhrase(Output, ARRAY_SIZE_INCR);
+ Phrase outPhrase(ARRAY_SIZE_INCR);
hypo->CreateOutputPhrase(outPhrase);
// delete 1st & last
- assert(outPhrase.GetSize() >= 2);
+ CHECK(outPhrase.GetSize() >= 2);
outPhrase.RemoveWord(0);
outPhrase.RemoveWord(outPhrase.GetSize() - 1);
@@ -338,7 +338,7 @@ void IOWrapper::OutputNBestList(const ChartTrellisPathList &nBestList, const Cha
Moses::Phrase outputPhrase = path.GetOutputPhrase();
// delete 1st & last
- assert(outputPhrase.GetSize() >= 2);
+ CHECK(outputPhrase.GetSize() >= 2);
outputPhrase.RemoveWord(0);
outputPhrase.RemoveWord(outputPhrase.GetSize() - 1);
@@ -439,7 +439,7 @@ void IOWrapper::OutputNBestList(const ChartTrellisPathList &nBestList, const Cha
out <<std::flush;
- assert(m_nBestOutputCollector);
+ CHECK(m_nBestOutputCollector);
m_nBestOutputCollector->Write(translationId, out.str());
}
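
Note: the hunks above (and many of the hunks below) replace bare assert() calls with CHECK() and swap <cassert> for "util/check.hh". The header itself is not part of this diff, so its exact definition is an assumption here; the usual motivation for such a macro is that assert() compiles to nothing under NDEBUG, whereas a CHECK-style macro keeps validating in release builds. A minimal, self-contained sketch of what such a macro could look like (illustrative only, not the real util/check.hh):

    // Illustrative sketch only: assumes CHECK aborts on failure even when
    // NDEBUG is defined. The real util/check.hh may differ.
    #include <cstdlib>
    #include <iostream>

    #define CHECK(Condition) \
      do { \
        if (!(Condition)) { \
          std::cerr << "Check failed: " #Condition \
                    << " at " << __FILE__ << ":" << __LINE__ << std::endl; \
          std::abort(); \
        } \
      } while (0)

    int main() {
      CHECK(2 + 2 == 4);  // passes silently
      CHECK(1 == 2);      // prints the failed condition and aborts
      return 0;
    }
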
diff --git a/moses-chart-cmd/src/Jamfile b/moses-chart-cmd/src/Jamfile
new file mode 100644
index 000000000..583b4664e
--- /dev/null
+++ b/moses-chart-cmd/src/Jamfile
@@ -0,0 +1,3 @@
+exe moses_chart : Main.cpp mbr.cpp IOWrapper.cpp TranslationAnalysis.cpp ../../moses/src//moses ;
+
+install legacy-install : moses_chart : <location>. <install-type>EXE <install-dependencies>on <link>shared:<dll-path>$(TOP)/moses-chart-cmd/src <link>shared:<install-type>LIB ;
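
Note: this new Jamfile is the Boost.Build counterpart of the autotools build being removed in this commit (the matching moses-chart-cmd/src/Makefile.am is deleted a few hunks further down). The exe rule declares the moses_chart binary from the listed sources plus the moses library project, and the install rule appears to copy the built executable next to the Jamfile, presumably to mimic the legacy in-tree layout.
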
diff --git a/moses-chart-cmd/src/Main.cpp b/moses-chart-cmd/src/Main.cpp
index b10cea070..2c9002720 100644
--- a/moses-chart-cmd/src/Main.cpp
+++ b/moses-chart-cmd/src/Main.cpp
@@ -92,7 +92,7 @@ public:
ChartManager manager(*m_source, &system);
manager.ProcessSentence();
- assert(!staticData.UseMBR());
+ CHECK(!staticData.UseMBR());
// 1-best
const ChartHypothesis *bestHypo = manager.GetBestHypothesis();
@@ -123,7 +123,7 @@ public:
std::ostringstream out;
manager.GetSearchGraph(lineNumber, out);
OutputCollector *oc = m_ioWrapper.GetSearchGraphOutputCollector();
- assert(oc);
+ CHECK(oc);
oc->Write(lineNumber, out.str());
}
@@ -147,7 +147,7 @@ bool ReadInput(IOWrapper &ioWrapper, InputTypeEnum inputType, InputType*& source
delete source;
switch(inputType) {
case SentenceInput:
- source = ioWrapper.GetInput(new Sentence(Input));
+ source = ioWrapper.GetInput(new Sentence);
break;
case ConfusionNetworkInput:
source = ioWrapper.GetInput(new ConfusionNet);
@@ -156,7 +156,7 @@ bool ReadInput(IOWrapper &ioWrapper, InputTypeEnum inputType, InputType*& source
source = ioWrapper.GetInput(new WordLattice);
break;
case TreeInputType:
- source = ioWrapper.GetInput(new TreeInput(Input));
+ source = ioWrapper.GetInput(new TreeInput);
break;
default:
TRACE_ERR("Unknown input type: " << inputType << "\n");
@@ -230,7 +230,7 @@ int main(int argc, char* argv[])
exit(0);
}
- assert(staticData.GetSearchAlgorithm() == ChartDecoding);
+ CHECK(staticData.GetSearchAlgorithm() == ChartDecoding);
// set up read/writing class
IOWrapper *ioWrapper = GetIODevice(staticData);
diff --git a/moses-chart-cmd/src/Makefile.am b/moses-chart-cmd/src/Makefile.am
deleted file mode 100644
index 04d350c22..000000000
--- a/moses-chart-cmd/src/Makefile.am
+++ /dev/null
@@ -1,10 +0,0 @@
-bin_PROGRAMS = moses_chart
-moses_chart_SOURCES = Main.cpp mbr.cpp IOWrapper.cpp TranslationAnalysis.cpp
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES -DUSE_HYPO_POOL -I$(top_srcdir)/moses/src $(BOOST_CPPFLAGS)
-
-moses_chart_LDADD = -L$(top_srcdir)/moses/src -L$(top_srcdir)/OnDiskPt/src -lmoses -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-moses_chart_DEPENDENCIES = $(top_srcdir)/moses/src/libmoses.la $(top_srcdir)/OnDiskPt/src/libOnDiskPt.a
-
-
-
-
diff --git a/moses-cmd/src/IOWrapper.cpp b/moses-cmd/src/IOWrapper.cpp
index 86a42c82d..270f17c82 100644
--- a/moses-cmd/src/IOWrapper.cpp
+++ b/moses-cmd/src/IOWrapper.cpp
@@ -160,13 +160,13 @@ void IOWrapper::Initialization(const std::vector<FactorType> &/*inputFactorOrder
if (staticData.IsDetailedTranslationReportingEnabled()) {
const std::string &path = staticData.GetDetailedTranslationReportingFilePath();
m_detailedTranslationReportingStream = new std::ofstream(path.c_str());
- assert(m_detailedTranslationReportingStream->good());
+ CHECK(m_detailedTranslationReportingStream->good());
}
// sentence alignment output
if (! staticData.GetAlignmentOutputFile().empty()) {
m_alignmentOutputStream = new ofstream(staticData.GetAlignmentOutputFile().c_str());
- assert(m_alignmentOutputStream->good());
+ CHECK(m_alignmentOutputStream->good());
}
}
@@ -190,7 +190,7 @@ InputType*IOWrapper::GetInput(InputType* inputType)
*/
void OutputSurface(std::ostream &out, const Phrase &phrase, const std::vector<FactorType> &outputFactorOrder, bool reportAllFactors)
{
- assert(outputFactorOrder.size() > 0);
+ CHECK(outputFactorOrder.size() > 0);
if (reportAllFactors == true) {
out << phrase;
} else {
@@ -561,7 +561,7 @@ bool ReadInput(IOWrapper &ioWrapper, InputTypeEnum inputType, InputType*& source
delete source;
switch(inputType) {
case SentenceInput:
- source = ioWrapper.GetInput(new Sentence(Input));
+ source = ioWrapper.GetInput(new Sentence);
break;
case ConfusionNetworkInput:
source = ioWrapper.GetInput(new ConfusionNet);
diff --git a/moses-cmd/src/IOWrapper.h b/moses-cmd/src/IOWrapper.h
index e28593825..be792e276 100755..100644
--- a/moses-cmd/src/IOWrapper.h
+++ b/moses-cmd/src/IOWrapper.h
@@ -38,7 +38,7 @@ POSSIBILITY OF SUCH DAMAGE.
#include <fstream>
#include <ostream>
#include <vector>
-#include <cassert>
+#include "util/check.hh"
#include "TypeDef.h"
#include "Sentence.h"
diff --git a/moses-cmd/src/Jamfile b/moses-cmd/src/Jamfile
new file mode 100644
index 000000000..d4eb9d0b2
--- /dev/null
+++ b/moses-cmd/src/Jamfile
@@ -0,0 +1,8 @@
+alias deps : ../../moses/src//moses ;
+
+exe moses : Main.cpp mbr.cpp IOWrapper.cpp TranslationAnalysis.cpp LatticeMBR.cpp deps ;
+exe lmbrgrid : LatticeMBRGrid.cpp LatticeMBR.cpp IOWrapper.cpp deps ;
+
+alias programs : moses lmbrgrid ;
+
+install legacy-install : programs : <location>. <install-type>EXE <install-dependencies>on <link>shared:<dll-path>$(TOP)/moses-cmd/src <link>shared:<install-type>LIB ;
diff --git a/moses-cmd/src/LatticeMBR.cpp b/moses-cmd/src/LatticeMBR.cpp
index ef7a5f71a..cf172d737 100644
--- a/moses-cmd/src/LatticeMBR.cpp
+++ b/moses-cmd/src/LatticeMBR.cpp
@@ -36,7 +36,7 @@ void extract_ngrams(const vector<Word >& sentence, map < Phrase, int > & allngr
{
for (int k = 0; k < (int)bleu_order; k++) {
for(int i =0; i < max((int)sentence.size()-k,0); i++) {
- Phrase ngram(Output, k+1);
+ Phrase ngram( k+1);
for ( int j = i; j<= i+k; j++) {
ngram.AddWord(sentence[j]);
}
@@ -191,7 +191,7 @@ void pruneLatticeFB(Lattice & connectedHyp, map < const Hypothesis*, set <const
// is its best predecessor already included ?
if (survivingHyps.find(currHyp->GetPrevHypo()) != survivingHyps.end()) { //yes, then add an edge
vector <Edge>& edges = incomingEdges[currHyp];
- Edge winningEdge(currHyp->GetPrevHypo(),currHyp,scale*(currHyp->GetScore() - currHyp->GetPrevHypo()->GetScore()),currHyp->GetTargetPhrase());
+ Edge winningEdge(currHyp->GetPrevHypo(),currHyp,scale*(currHyp->GetScore() - currHyp->GetPrevHypo()->GetScore()),currHyp->GetCurrTargetPhrase());
edges.push_back(winningEdge);
++numEdgesCreated;
}
@@ -205,7 +205,7 @@ void pruneLatticeFB(Lattice & connectedHyp, map < const Hypothesis*, set <const
const Hypothesis* loserPrevHypo = loserHypo->GetPrevHypo();
if (survivingHyps.find(loserPrevHypo) != survivingHyps.end()) { //found it, add edge
double arcScore = loserHypo->GetScore() - loserPrevHypo->GetScore();
- Edge losingEdge(loserPrevHypo, currHyp, arcScore*scale, loserHypo->GetTargetPhrase());
+ Edge losingEdge(loserPrevHypo, currHyp, arcScore*scale, loserHypo->GetCurrTargetPhrase());
vector <Edge>& edges = incomingEdges[currHyp];
edges.push_back(losingEdge);
++numEdgesCreated;
@@ -227,7 +227,7 @@ void pruneLatticeFB(Lattice & connectedHyp, map < const Hypothesis*, set <const
//Curr Hyp can be : a) the best predecessor of succ b) or an arc attached to succ
if (succHyp->GetPrevHypo() == currHyp) { //best predecessor
vector <Edge>& succEdges = incomingEdges[succHyp];
- Edge succWinningEdge(currHyp, succHyp, scale*(succHyp->GetScore() - currHyp->GetScore()), succHyp->GetTargetPhrase());
+ Edge succWinningEdge(currHyp, succHyp, scale*(succHyp->GetScore() - currHyp->GetScore()), succHyp->GetCurrTargetPhrase());
succEdges.push_back(succWinningEdge);
survivingHyps.insert(succHyp);
++numEdgesCreated;
@@ -244,7 +244,7 @@ void pruneLatticeFB(Lattice & connectedHyp, map < const Hypothesis*, set <const
if (loserPrevHypo == currHyp) { //found it
vector <Edge>& succEdges = incomingEdges[succHyp];
double arcScore = loserHypo->GetScore() - currHyp->GetScore();
- Edge losingEdge(currHyp, succHyp,scale* arcScore, loserHypo->GetTargetPhrase());
+ Edge losingEdge(currHyp, succHyp,scale* arcScore, loserHypo->GetCurrTargetPhrase());
succEdges.push_back(losingEdge);
++numEdgesCreated;
}
@@ -403,7 +403,7 @@ const NgramHistory& Edge::GetNgrams(map<const Hypothesis*, vector<Edge> > & inco
for (size_t start = 0; start < currPhrase.GetSize(); ++start) {
for (size_t end = start; end < start + bleu_order; ++end) {
if (end < currPhrase.GetSize()) {
- Phrase edgeNgram(Output, end-start+1);
+ Phrase edgeNgram(end-start+1);
for (size_t index = start; index <= end; ++index) {
edgeNgram.AddWord(currPhrase.GetWord(index));
}
@@ -434,8 +434,8 @@ const NgramHistory& Edge::GetNgrams(map<const Hypothesis*, vector<Edge> > & inco
cerr << "edgeInNgram: " << edgeIncomingNgram << endl;
}
- Phrase edgeSuffix(Output, ARRAY_SIZE_INCR);
- Phrase ngramSuffix(Output, ARRAY_SIZE_INCR);
+ Phrase edgeSuffix(ARRAY_SIZE_INCR);
+ Phrase ngramSuffix(ARRAY_SIZE_INCR);
GetPhraseSuffix(edgeWords,back,edgeSuffix);
GetPhraseSuffix(edgeIncomingNgram,back,ngramSuffix);
diff --git a/moses-cmd/src/LatticeMBRGrid.cpp b/moses-cmd/src/LatticeMBRGrid.cpp
index bd7564279..8bd52b7d7 100644
--- a/moses-cmd/src/LatticeMBRGrid.cpp
+++ b/moses-cmd/src/LatticeMBRGrid.cpp
@@ -64,7 +64,7 @@ public:
/** Add a parameter with key, command line argument, and default value */
void addParam(gridkey key, const string& arg, float defaultValue) {
m_args[arg] = key;
- assert(m_grid.find(key) == m_grid.end());
+ CHECK(m_grid.find(key) == m_grid.end());
m_grid[key].push_back(defaultValue);
}
@@ -172,7 +172,7 @@ int main(int argc, char* argv[])
while(ReadInput(*ioWrapper,staticData.GetInputType(),source)) {
++lineCount;
- Sentence sentence(Input);
+ Sentence sentence;
const TranslationSystem& system = staticData.GetTranslationSystem(TranslationSystem::DEFAULT);
Manager manager(*source,staticData.GetSearchAlgorithm(), &system);
manager.ProcessSentence();
diff --git a/moses-cmd/src/Main.cpp b/moses-cmd/src/Main.cpp
index 846f27b7f..0eccac246 100644
--- a/moses-cmd/src/Main.cpp
+++ b/moses-cmd/src/Main.cpp
@@ -95,7 +95,7 @@ public:
// shorthand for "global data"
const StaticData &staticData = StaticData::Instance();
// input sentence
- Sentence sentence(Input);
+ Sentence sentence();
// set translation system
const TranslationSystem& system = staticData.GetTranslationSystem(TranslationSystem::DEFAULT);
diff --git a/moses-cmd/src/Makefile.am b/moses-cmd/src/Makefile.am
deleted file mode 100644
index 055277ce4..000000000
--- a/moses-cmd/src/Makefile.am
+++ /dev/null
@@ -1,13 +0,0 @@
-bin_PROGRAMS = moses lmbrgrid checkplf
-
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES -DUSE_HYPO_POOL -I$(top_srcdir)/moses/src $(BOOST_CPPFLAGS)
-
-checkplf_SOURCES = checkplf.cpp
-checkplf_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/OnDiskPt/src -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-
-moses_SOURCES = Main.cpp mbr.cpp IOWrapper.cpp TranslationAnalysis.cpp LatticeMBR.cpp
-moses_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/OnDiskPt/src -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
-
-
-lmbrgrid_SOURCES = LatticeMBRGrid.cpp LatticeMBR.cpp IOWrapper.cpp
-lmbrgrid_LDADD = $(top_builddir)/moses/src/libmoses.la -L$(top_srcdir)/OnDiskPt/src -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
diff --git a/moses/src/AlignmentInfo.cpp b/moses/src/AlignmentInfo.cpp
index c3b10a2d5..3af17870e 100644
--- a/moses/src/AlignmentInfo.cpp
+++ b/moses/src/AlignmentInfo.cpp
@@ -17,7 +17,7 @@
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
#include <algorithm>
-#include <cassert>
+#include "util/check.hh"
#include "AlignmentInfo.h"
#include "TypeDef.h"
#include "StaticData.h"
@@ -76,7 +76,7 @@ std::vector< const std::pair<size_t,size_t>* > AlignmentInfo::GetSortedAlignment
break;
default:
- assert(false);
+ CHECK(false);
}
return ret;
diff --git a/moses/src/BilingualDynSuffixArray.cpp b/moses/src/BilingualDynSuffixArray.cpp
index b1be289a9..4ae3ab932 100644
--- a/moses/src/BilingualDynSuffixArray.cpp
+++ b/moses/src/BilingualDynSuffixArray.cpp
@@ -46,10 +46,10 @@ bool BilingualDynSuffixArray::Load(
InputFileStream sourceStrme(source);
InputFileStream targetStrme(target);
cerr << "Loading source corpus...\n";
- LoadCorpus(sourceStrme, m_inputFactors, Input, *m_srcCorpus, m_srcSntBreaks, m_srcVocab);
+ LoadCorpus(sourceStrme, m_inputFactors, *m_srcCorpus, m_srcSntBreaks, m_srcVocab);
cerr << "Loading target corpus...\n";
- LoadCorpus(targetStrme, m_outputFactors, Output, *m_trgCorpus, m_trgSntBreaks, m_trgVocab);
- assert(m_srcSntBreaks.size() == m_trgSntBreaks.size());
+ LoadCorpus(targetStrme, m_outputFactors,*m_trgCorpus, m_trgSntBreaks, m_trgVocab);
+ CHECK(m_srcSntBreaks.size() == m_trgSntBreaks.size());
// build suffix arrays and auxilliary arrays
cerr << "Building Source Suffix Array...\n";
@@ -76,7 +76,7 @@ int BilingualDynSuffixArray::LoadRawAlignments(InputFileStream& align)
std::vector<int> vtmp;
while(getline(align, line)) {
Utils::splitToInt(line, vtmp, "- ");
- assert(vtmp.size() % 2 == 0);
+ CHECK(vtmp.size() % 2 == 0);
std::vector<short> vAlgn; // store as short ints for memory
for (std::vector<int>::const_iterator itr = vtmp.begin();
itr != vtmp.end(); ++itr) {
@@ -90,7 +90,7 @@ int BilingualDynSuffixArray::LoadRawAlignments(string& align) {
// stores the alignments in the raw file format
vector<int> vtmp;
Utils::splitToInt(align, vtmp, "- ");
- assert(vtmp.size() % 2 == 0);
+ CHECK(vtmp.size() % 2 == 0);
vector<short> vAlgn; // store as short ints for memory
for (std::vector<int>::const_iterator itr = vtmp.begin();
itr != vtmp.end(); ++itr) {
@@ -108,7 +108,7 @@ int BilingualDynSuffixArray::LoadAlignments(InputFileStream& align)
while(getline(align, line)) {
Utils::splitToInt(line, vtmp, "- ");
- assert(vtmp.size() % 2 == 0);
+ CHECK(vtmp.size() % 2 == 0);
int sourceSize = GetSourceSentenceSize(sntIndex);
int targetSize = GetTargetSentenceSize(sntIndex);
@@ -117,8 +117,8 @@ int BilingualDynSuffixArray::LoadAlignments(InputFileStream& align)
for(int i=0; i < (int)vtmp.size(); i+=2) {
int sourcePos = vtmp[i];
int targetPos = vtmp[i+1];
- assert(sourcePos < sourceSize);
- assert(targetPos < targetSize);
+ CHECK(sourcePos < sourceSize);
+ CHECK(targetPos < targetSize);
curSnt.alignedList[sourcePos].push_back(targetPos); // list of target nodes for each source word
curSnt.numberAligned[targetPos]++; // cnt of how many source words connect to this target word
@@ -176,7 +176,7 @@ void BilingualDynSuffixArray::CleanUp()
}
int BilingualDynSuffixArray::LoadCorpus(InputFileStream& corpus, const FactorList& factors,
- const FactorDirection& direction, std::vector<wordID_t>& cArray, std::vector<wordID_t>& sntArray,
+ std::vector<wordID_t>& cArray, std::vector<wordID_t>& sntArray,
Vocab* vocab)
{
std::string line, word;
@@ -185,7 +185,7 @@ int BilingualDynSuffixArray::LoadCorpus(InputFileStream& corpus, const FactorLis
const std::string& factorDelimiter = StaticData::Instance().GetFactorDelimiter();
while(getline(corpus, line)) {
sntArray.push_back(sntIdx);
- Phrase phrase(direction, ARRAY_SIZE_INCR);
+ Phrase phrase(ARRAY_SIZE_INCR);
// parse phrase
phrase.CreateFromString( factors, line, factorDelimiter);
// store words in vocabulary and corpus
@@ -240,7 +240,7 @@ pair<float, float> BilingualDynSuffixArray::GetLexicalWeight(const PhrasePair& p
CacheWordProbs(srcWord);
itrCache = m_wordPairCache.find(wordpair); // search cache again
}
- assert(itrCache != m_wordPairCache.end());
+ CHECK(itrCache != m_wordPairCache.end());
srcSumPairProbs += itrCache->second.first;
targetProbs[wordpair] = itrCache->second.second;
}
@@ -255,7 +255,7 @@ pair<float, float> BilingualDynSuffixArray::GetLexicalWeight(const PhrasePair& p
CacheWordProbs(srcWord);
itrCache = m_wordPairCache.find(wordpair); // search cache again
}
- assert(itrCache != m_wordPairCache.end());
+ CHECK(itrCache != m_wordPairCache.end());
srcSumPairProbs += itrCache->second.first;
targetProbs[wordpair] = itrCache->second.second;
}
@@ -306,13 +306,13 @@ void BilingualDynSuffixArray::CacheWordProbs(wordID_t srcWord) const
std::map<wordID_t, int> counts;
std::vector<wordID_t> sword(1, srcWord), wrdIndices;
bool ret = m_srcSA->GetCorpusIndex(&sword, &wrdIndices);
- assert(ret);
+ CHECK(ret);
std::vector<int> sntIndexes = GetSntIndexes(wrdIndices, 1, m_srcSntBreaks);
float denom(0);
// for each occurrence of this word
for(size_t snt = 0; snt < sntIndexes.size(); ++snt) {
int sntIdx = sntIndexes.at(snt); // get corpus index for sentence
- assert(sntIdx != -1);
+ CHECK(sntIdx != -1);
int srcWrdSntIdx = wrdIndices.at(snt) - m_srcSntBreaks.at(sntIdx); // get word index in sentence
const std::vector<int> srcAlg = GetSentenceAlignment(sntIdx).alignedList.at(srcWrdSntIdx); // list of target words for this source word
if(srcAlg.size() == 0) {
@@ -356,7 +356,7 @@ TargetPhrase* BilingualDynSuffixArray::GetMosesFactorIDs(const SAPhrase& phrase)
TargetPhrase* targetPhrase = new TargetPhrase(Output);
for(size_t i=0; i < phrase.words.size(); ++i) { // look up trg words
Word& word = m_trgVocab->GetWord( phrase.words[i]);
- assert(word != m_trgVocab->GetkOOVWord());
+ CHECK(word != m_trgVocab->GetkOOVWord());
targetPhrase->AddWord(word);
}
// scoring
@@ -409,7 +409,7 @@ void BilingualDynSuffixArray::GetTargetPhrasesByLexicalWeight(const Phrase& src,
for(iterPhrases = phraseCounts.begin(); iterPhrases != phraseCounts.end(); ++iterPhrases) {
float trg2SrcMLE = float(iterPhrases->second) / totalTrgPhrases;
itrLexW = lexicalWeights.find(iterPhrases->first);
- assert(itrLexW != lexicalWeights.end());
+ CHECK(itrLexW != lexicalWeights.end());
Scores scoreVector(3);
scoreVector[0] = trg2SrcMLE;
scoreVector[1] = itrLexW->second.first;
@@ -458,7 +458,7 @@ void BilingualDynSuffixArray::addSntPair(string& source, string& target, string&
const std::string& factorDelimiter = StaticData::Instance().GetFactorDelimiter();
const unsigned oldSrcCrpSize = m_srcCorpus->size(), oldTrgCrpSize = m_trgCorpus->size();
cerr << "old source corpus size = " << oldSrcCrpSize << "\told target size = " << oldTrgCrpSize << endl;
- Phrase sphrase(Input, ARRAY_SIZE_INCR);
+ Phrase sphrase(ARRAY_SIZE_INCR);
sphrase.CreateFromString(m_inputFactors, source, factorDelimiter);
m_srcVocab->MakeOpen();
wordID_t sIDs[sphrase.GetSize()];
@@ -473,7 +473,7 @@ void BilingualDynSuffixArray::addSntPair(string& source, string& target, string&
}
m_srcSntBreaks.push_back(oldSrcCrpSize); // former end of corpus is index of new sentence
m_srcVocab->MakeClosed();
- Phrase tphrase(Output, ARRAY_SIZE_INCR);
+ Phrase tphrase(ARRAY_SIZE_INCR);
tphrase.CreateFromString(m_outputFactors, target, factorDelimiter);
m_trgVocab->MakeOpen();
wordID_t tIDs[tphrase.GetSize()];
diff --git a/moses/src/BilingualDynSuffixArray.h b/moses/src/BilingualDynSuffixArray.h
index 6f33911b9..137978c14 100644
--- a/moses/src/BilingualDynSuffixArray.h
+++ b/moses/src/BilingualDynSuffixArray.h
@@ -22,7 +22,7 @@ public:
void SetId(size_t pos, wordID_t id)
{
- assert(pos < words.size());
+ CHECK(pos < words.size());
words[pos] = id;
}
bool operator<(const SAPhrase& phr2) const
@@ -109,7 +109,7 @@ private:
const size_t m_maxPhraseLength, m_maxSampleSize;
int LoadCorpus(InputFileStream&, const std::vector<FactorType>& factors,
- const FactorDirection& direction, std::vector<wordID_t>&, std::vector<wordID_t>&,
+ std::vector<wordID_t>&, std::vector<wordID_t>&,
Vocab*);
int LoadAlignments(InputFileStream& aligs);
int LoadRawAlignments(InputFileStream& aligs);
diff --git a/moses/src/BitmapContainer.cpp b/moses/src/BitmapContainer.cpp
index 48a5249cf..c80f3b542 100644
--- a/moses/src/BitmapContainer.cpp
+++ b/moses/src/BitmapContainer.cpp
@@ -59,7 +59,7 @@ public:
const TranslationSystem* m_system;
bool operator()(const Hypothesis* hypoA, const Hypothesis* hypoB) const {
- assert (m_transOptRange != NULL);
+ CHECK(m_transOptRange != NULL);
const float weightDistortion = m_system->GetWeightDistortion();
const DistortionScoreProducer *dsp = m_system->GetDistortionProducer();
@@ -149,11 +149,11 @@ BackwardsEdge::BackwardsEdge(const BitmapContainer &prevBitmapContainer
}
if (m_translations.size() > 1) {
- assert(m_translations.Get(0)->GetFutureScore() >= m_translations.Get(1)->GetFutureScore());
+ CHECK(m_translations.Get(0)->GetFutureScore() >= m_translations.Get(1)->GetFutureScore());
}
if (m_hypotheses.size() > 1) {
- assert(m_hypotheses[0]->GetTotalScore() >= m_hypotheses[1]->GetTotalScore());
+ CHECK(m_hypotheses[0]->GetTotalScore() >= m_hypotheses[1]->GetTotalScore());
}
HypothesisScoreOrdererWithDistortion orderer (&transOptRange, system);
@@ -202,8 +202,8 @@ BackwardsEdge::SeenPosition(const size_t x, const size_t y)
void
BackwardsEdge::SetSeenPosition(const size_t x, const size_t y)
{
- assert(x < (1<<17));
- assert(y < (1<<17));
+ CHECK(x < (1<<17));
+ CHECK(y < (1<<17));
m_seenPosition.insert((x<<16) + y);
}
@@ -367,7 +367,7 @@ BitmapContainer::AddHypothesis(Hypothesis *hypothesis)
++iter;
}
- assert(itemExists == false);
+ CHECK(itemExists == false);
m_hypotheses.push_back(hypothesis);
}
@@ -410,12 +410,12 @@ BitmapContainer::ProcessBestHypothesis()
HypothesisQueueItem *item = Dequeue();
// If the priority queue is exhausted, we are done and should have exited
- assert(item != NULL);
+ CHECK(item != NULL);
// check we are pulling things off of priority queue in right order
if (!Empty()) {
HypothesisQueueItem *check = Dequeue(true);
- assert(item->GetHypothesis()->GetTotalScore() >= check->GetHypothesis()->GetTotalScore());
+ CHECK(item->GetHypothesis()->GetTotalScore() >= check->GetHypothesis()->GetTotalScore());
}
// Logging for the criminally insane
diff --git a/moses/src/ChartCell.cpp b/moses/src/ChartCell.cpp
index ac5c47089..21c5dd68a 100644
--- a/moses/src/ChartCell.cpp
+++ b/moses/src/ChartCell.cpp
@@ -62,7 +62,7 @@ const HypoList &ChartCell::GetSortedHypotheses(const Word &constituentLabel) con
{
std::map<Word, ChartHypothesisCollection>::const_iterator
iter = m_hypoColl.find(constituentLabel);
- assert(iter != m_hypoColl.end());
+ CHECK(iter != m_hypoColl.end());
return iter->second.GetSortedHypotheses();
}
@@ -117,7 +117,7 @@ void ChartCell::ProcessSentence(const ChartTranslationOptionList &transOptList
void ChartCell::SortHypotheses()
{
// sort each mini cells & fill up target lhs list
- assert(m_targetLabelSet.Empty());
+ CHECK(m_targetLabelSet.Empty());
std::map<Word, ChartHypothesisCollection>::iterator iter;
for (iter = m_hypoColl.begin(); iter != m_hypoColl.end(); ++iter) {
ChartHypothesisCollection &coll = iter->second;
@@ -135,7 +135,7 @@ const ChartHypothesis *ChartCell::GetBestHypothesis() const
std::map<Word, ChartHypothesisCollection>::const_iterator iter;
for (iter = m_hypoColl.begin(); iter != m_hypoColl.end(); ++iter) {
const HypoList &sortedList = iter->second.GetSortedHypotheses();
- assert(sortedList.size() > 0);
+ CHECK(sortedList.size() > 0);
const ChartHypothesis *hypo = sortedList[0];
if (hypo->GetTotalScore() > bestScore) {
diff --git a/moses/src/ChartCell.h b/moses/src/ChartCell.h
index 4ea148ff1..ab9e5b1ea 100644
--- a/moses/src/ChartCell.h
+++ b/moses/src/ChartCell.h
@@ -72,7 +72,7 @@ public:
const ChartHypothesis *GetBestHypothesis() const;
const ChartCellLabel &GetSourceWordLabel() const {
- assert(m_coverage.GetNumWordsCovered() == 1);
+ CHECK(m_coverage.GetNumWordsCovered() == 1);
return *m_sourceWordLabel;
}
diff --git a/moses/src/ChartHypothesis.cpp b/moses/src/ChartHypothesis.cpp
index cd3448f43..9c618d73d 100644
--- a/moses/src/ChartHypothesis.cpp
+++ b/moses/src/ChartHypothesis.cpp
@@ -108,7 +108,7 @@ void ChartHypothesis::CreateOutputPhrase(Phrase &outPhrase) const
/** Return full output phrase */
Phrase ChartHypothesis::GetOutputPhrase() const
{
- Phrase outPhrase(Output, ARRAY_SIZE_INCR);
+ Phrase outPhrase(ARRAY_SIZE_INCR);
CreateOutputPhrase(outPhrase);
return outPhrase;
}
diff --git a/moses/src/ChartHypothesisCollection.cpp b/moses/src/ChartHypothesisCollection.cpp
index 531a60b20..48a108aab 100644
--- a/moses/src/ChartHypothesisCollection.cpp
+++ b/moses/src/ChartHypothesisCollection.cpp
@@ -73,7 +73,7 @@ bool ChartHypothesisCollection::AddHypothesis(ChartHypothesis *hypo, ChartManage
// equiv hypo exists, recombine with other hypo
HCType::iterator &iterExisting = addRet.first;
ChartHypothesis *hypoExisting = *iterExisting;
- assert(iterExisting != m_hypos.end());
+ CHECK(iterExisting != m_hypos.end());
//StaticData::Instance().GetSentenceStats().AddRecombination(*hypo, **iterExisting);
@@ -225,7 +225,7 @@ void ChartHypothesisCollection::PruneToSize(ChartManager &manager)
for (iter = hyposOrdered.begin() + (m_maxHypoStackSize * 2); iter != hyposOrdered.end(); ++iter) {
ChartHypothesis *hypo = *iter;
HCType::iterator iterFindHypo = m_hypos.find(hypo);
- assert(iterFindHypo != m_hypos.end());
+ CHECK(iterFindHypo != m_hypos.end());
Remove(iterFindHypo);
}
}
@@ -234,7 +234,7 @@ void ChartHypothesisCollection::PruneToSize(ChartManager &manager)
void ChartHypothesisCollection::SortHypotheses()
{
- assert(m_hyposOrdered.empty());
+ CHECK(m_hyposOrdered.empty());
if (!m_hypos.empty()) {
// done everything for this cell.
// sort
diff --git a/moses/src/ChartHypothesisCollection.h b/moses/src/ChartHypothesisCollection.h
index 059686854..ebf1c6002 100644
--- a/moses/src/ChartHypothesisCollection.h
+++ b/moses/src/ChartHypothesisCollection.h
@@ -44,10 +44,10 @@ public:
// assert in same cell
const WordsRange &rangeA = hypoA->GetCurrSourceRange()
, &rangeB = hypoB->GetCurrSourceRange();
- assert(rangeA == rangeB);
+ CHECK(rangeA == rangeB);
// shouldn't be mixing hypos with different lhs
- assert(hypoA->GetTargetLHS() == hypoB->GetTargetLHS());
+ CHECK(hypoA->GetTargetLHS() == hypoB->GetTargetLHS());
int ret = hypoA->RecombineCompare(*hypoB);
if (ret != 0)
diff --git a/moses/src/ChartManager.cpp b/moses/src/ChartManager.cpp
index 7bcaf80af..b2ed3a435 100644
--- a/moses/src/ChartManager.cpp
+++ b/moses/src/ChartManager.cpp
@@ -198,7 +198,7 @@ void ChartManager::CalcNBest(size_t count, ChartTrellisPathList &ret,bool onlyDi
++i) {
// Get the best detour from the queue.
std::auto_ptr<const ChartTrellisDetour> detour(contenders.Pop());
- assert(detour.get());
+ CHECK(detour.get());
// Create a full base path from the chosen detour.
basePath.reset(new ChartTrellisPath(*detour));
@@ -206,7 +206,7 @@ void ChartManager::CalcNBest(size_t count, ChartTrellisPathList &ret,bool onlyDi
// Generate new detours from this base path and add them to the queue of
// contenders. The new detours deviate from the base path by a single
// replacement along the previous detour sub-path.
- assert(basePath->GetDeviationPoint());
+ CHECK(basePath->GetDeviationPoint());
CreateDeviantPaths(basePath, *(basePath->GetDeviationPoint()), contenders);
// If the n-best list is allowed to contain duplicate translations (at the
diff --git a/moses/src/ChartRuleLookupManagerMemory.cpp b/moses/src/ChartRuleLookupManagerMemory.cpp
index 6b8fc25e5..aab3dfcce 100644
--- a/moses/src/ChartRuleLookupManagerMemory.cpp
+++ b/moses/src/ChartRuleLookupManagerMemory.cpp
@@ -38,7 +38,7 @@ ChartRuleLookupManagerMemory::ChartRuleLookupManagerMemory(
: ChartRuleLookupManager(src, cellColl)
, m_ruleTable(ruleTable)
{
- assert(m_dottedRuleColls.size() == 0);
+ CHECK(m_dottedRuleColls.size() == 0);
size_t sourceSize = src.GetSize();
m_dottedRuleColls.resize(sourceSize);
diff --git a/moses/src/ChartRuleLookupManagerOnDisk.cpp b/moses/src/ChartRuleLookupManagerOnDisk.cpp
index 48933474f..b8c0674f5 100644
--- a/moses/src/ChartRuleLookupManagerOnDisk.cpp
+++ b/moses/src/ChartRuleLookupManagerOnDisk.cpp
@@ -25,7 +25,7 @@
#include "StaticData.h"
#include "DotChartOnDisk.h"
#include "ChartTranslationOptionList.h"
-#include "../../OnDiskPt/src/TargetPhraseCollection.h"
+#include "../../OnDiskPt/TargetPhraseCollection.h"
using namespace std;
@@ -53,7 +53,7 @@ ChartRuleLookupManagerOnDisk::ChartRuleLookupManagerOnDisk(
, m_weight(weight)
, m_filePath(filePath)
{
- assert(m_expandableDottedRuleListVec.size() == 0);
+ CHECK(m_expandableDottedRuleListVec.size() == 0);
size_t sourceSize = sentence.GetSize();
m_expandableDottedRuleListVec.resize(sourceSize);
@@ -258,7 +258,7 @@ void ChartRuleLookupManagerOnDisk::GetChartRuleCollection(
targetPhraseCollection = iterCache->second;
}
- assert(targetPhraseCollection);
+ CHECK(targetPhraseCollection);
if (!targetPhraseCollection->IsEmpty()) {
outColl.Add(*targetPhraseCollection, prevDottedRule,
GetCellCollection(), adhereTableLimit, rulesLimit);
diff --git a/moses/src/ChartRuleLookupManagerOnDisk.h b/moses/src/ChartRuleLookupManagerOnDisk.h
index 5ab2fb74e..ba87dbbf8 100644
--- a/moses/src/ChartRuleLookupManagerOnDisk.h
+++ b/moses/src/ChartRuleLookupManagerOnDisk.h
@@ -21,7 +21,7 @@
#ifndef moses_ChartRuleLookupManagerOnDisk_h
#define moses_ChartRuleLookupManagerOnDisk_h
-#include "../../OnDiskPt/src/OnDiskWrapper.h"
+#include "../../OnDiskPt/OnDiskWrapper.h"
#include "ChartRuleLookupManager.h"
#include "ChartTranslationOptionList.h"
diff --git a/moses/src/ChartTranslationOption.cpp b/moses/src/ChartTranslationOption.cpp
index 27084f30b..6aa04d189 100644
--- a/moses/src/ChartTranslationOption.cpp
+++ b/moses/src/ChartTranslationOption.cpp
@@ -43,7 +43,7 @@ void ChartTranslationOption::CalcEstimateOfBestScore(
// add the score of the best underlying hypothesis
const ChartCellLabel &cellLabel = rule->GetChartCellLabel();
const ChartHypothesisCollection *hypoColl = cellLabel.GetStack();
- assert(hypoColl);
+ CHECK(hypoColl);
m_estimateOfBestScore += hypoColl->GetBestScore();
}
rule = rule->GetPrev();
diff --git a/moses/src/ChartTranslationOption.h b/moses/src/ChartTranslationOption.h
index 9b2df13de..116eff037 100644
--- a/moses/src/ChartTranslationOption.h
+++ b/moses/src/ChartTranslationOption.h
@@ -24,7 +24,7 @@
#include "TargetPhraseCollection.h"
#include "WordsRange.h"
-#include <cassert>
+#include "util/check.hh"
#include <vector>
namespace Moses
diff --git a/moses/src/ChartTranslationOptionCollection.cpp b/moses/src/ChartTranslationOptionCollection.cpp
index dea904d57..8f06656b2 100644
--- a/moses/src/ChartTranslationOptionCollection.cpp
+++ b/moses/src/ChartTranslationOptionCollection.cpp
@@ -19,7 +19,7 @@
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include "ChartTranslationOptionCollection.h"
#include "ChartCellCollection.h"
#include "InputType.h"
@@ -77,12 +77,12 @@ void ChartTranslationOptionCollection::CreateTranslationOptionsForRange(
ChartTranslationOptionList &chartRuleColl = GetTranslationOptionList(startPos, endPos);
const WordsRange &wordsRange = chartRuleColl.GetSourceRange();
- assert(m_decodeGraphList.size() == m_ruleLookupManagers.size());
+ CHECK(m_decodeGraphList.size() == m_ruleLookupManagers.size());
std::vector <DecodeGraph*>::const_iterator iterDecodeGraph;
std::vector <ChartRuleLookupManager*>::const_iterator iterRuleLookupManagers = m_ruleLookupManagers.begin();
for (iterDecodeGraph = m_decodeGraphList.begin(); iterDecodeGraph != m_decodeGraphList.end(); ++iterDecodeGraph, ++iterRuleLookupManagers) {
const DecodeGraph &decodeGraph = **iterDecodeGraph;
- assert(decodeGraph.GetSize() == 1);
+ CHECK(decodeGraph.GetSize() == 1);
ChartRuleLookupManager &ruleLookupManager = **iterRuleLookupManagers;
size_t maxSpan = decodeGraph.GetMaxChartSpan();
if (maxSpan == 0 || (endPos-startPos+1) <= maxSpan) {
@@ -125,7 +125,7 @@ void ChartTranslationOptionCollection::ProcessUnknownWord(size_t startPos, size_
ruleLookupManager.GetChartRuleCollection(wordsRange, false, fullList);
}
}
- assert(iterRuleLookupManagers == m_ruleLookupManagers.end());
+ CHECK(iterRuleLookupManagers == m_ruleLookupManagers.end());
bool alwaysCreateDirectTranslationOption = StaticData::Instance().IsAlwaysCreateDirectTranslationOption();
// create unknown words for 1 word coverage where we don't have any trans options
@@ -137,13 +137,13 @@ void ChartTranslationOptionCollection::ProcessUnknownWord(size_t startPos, size_
ChartTranslationOptionList &ChartTranslationOptionCollection::GetTranslationOptionList(size_t startPos, size_t endPos)
{
size_t sizeVec = m_collection[startPos].size();
- assert(endPos-startPos < sizeVec);
+ CHECK(endPos-startPos < sizeVec);
return m_collection[startPos][endPos - startPos];
}
const ChartTranslationOptionList &ChartTranslationOptionCollection::GetTranslationOptionList(size_t startPos, size_t endPos) const
{
size_t sizeVec = m_collection[startPos].size();
- assert(endPos-startPos < sizeVec);
+ CHECK(endPos-startPos < sizeVec);
return m_collection[startPos][endPos - startPos];
}
@@ -197,7 +197,7 @@ void ChartTranslationOptionCollection::ProcessOneUnknownWord(const Word &sourceW
// modify the starting bitmap
}
- Phrase* m_unksrc = new Phrase(Input, 1);
+ Phrase* m_unksrc = new Phrase(1);
m_unksrc->AddWord() = sourceWord;
m_unksrcs.push_back(m_unksrc);
@@ -221,7 +221,7 @@ void ChartTranslationOptionCollection::ProcessOneUnknownWord(const Word &sourceW
Word targetLHS(true);
targetLHS.CreateFromString(Output, staticData.GetOutputFactorOrder(), targetLHSStr, true);
- assert(targetLHS.GetFactor(0) != NULL);
+ CHECK(targetLHS.GetFactor(0) != NULL);
// add to dictionary
TargetPhrase *targetPhrase = new TargetPhrase(Output);
@@ -264,7 +264,7 @@ void ChartTranslationOptionCollection::ProcessOneUnknownWord(const Word &sourceW
Word targetLHS(true);
targetLHS.CreateFromString(Output, staticData.GetOutputFactorOrder(), targetLHSStr, true);
- assert(targetLHS.GetFactor(0) != NULL);
+ CHECK(targetLHS.GetFactor(0) != NULL);
targetPhrase->SetSourcePhrase(*m_unksrc);
targetPhrase->SetScore(unknownWordPenaltyProducer, unknownScore);
diff --git a/moses/src/ChartTranslationOptionList.cpp b/moses/src/ChartTranslationOptionList.cpp
index 100db79b4..cb1b8e256 100644
--- a/moses/src/ChartTranslationOptionList.cpp
+++ b/moses/src/ChartTranslationOptionList.cpp
@@ -102,7 +102,7 @@ void ChartTranslationOptionList::Add(const TargetPhraseCollection &targetPhraseC
void ChartTranslationOptionList::Add(ChartTranslationOption *transOpt)
{
- assert(transOpt);
+ CHECK(transOpt);
m_collection.push_back(transOpt);
}
diff --git a/moses/src/ChartTrellisNode.cpp b/moses/src/ChartTrellisNode.cpp
index 725886c68..ce3c9eaf7 100644
--- a/moses/src/ChartTrellisNode.cpp
+++ b/moses/src/ChartTrellisNode.cpp
@@ -76,7 +76,7 @@ ChartTrellisNode::~ChartTrellisNode()
Phrase ChartTrellisNode::GetOutputPhrase() const
{
// exactly like same fn in hypothesis, but use trellis nodes instead of prevHypos pointer
- Phrase ret(Output, ARRAY_SIZE_INCR);
+ Phrase ret(ARRAY_SIZE_INCR);
const ChartTranslationOption &transOpt = m_hypo.GetTranslationOption();
@@ -103,7 +103,7 @@ Phrase ChartTrellisNode::GetOutputPhrase() const
void ChartTrellisNode::CreateChildren()
{
- assert(m_children.empty());
+ CHECK(m_children.empty());
const std::vector<const ChartHypothesis*> &prevHypos = m_hypo.GetPrevHypos();
m_children.reserve(prevHypos.size());
for (size_t ind = 0; ind < prevHypos.size(); ++ind) {
@@ -118,7 +118,7 @@ void ChartTrellisNode::CreateChildren(const ChartTrellisNode &rootNode,
const ChartHypothesis &replacementHypo,
ChartTrellisNode *&deviationPoint)
{
- assert(m_children.empty());
+ CHECK(m_children.empty());
const NodeChildren &children = rootNode.GetChildren();
m_children.reserve(children.size());
for (size_t ind = 0; ind < children.size(); ++ind) {
diff --git a/moses/src/ChartTrellisPath.cpp b/moses/src/ChartTrellisPath.cpp
index de8803da3..231d4237a 100644
--- a/moses/src/ChartTrellisPath.cpp
+++ b/moses/src/ChartTrellisPath.cpp
@@ -42,7 +42,7 @@ ChartTrellisPath::ChartTrellisPath(const ChartTrellisDetour &detour)
, m_scoreBreakdown(detour.GetBasePath().m_scoreBreakdown)
, m_totalScore(0)
{
- assert(m_deviationPoint);
+ CHECK(m_deviationPoint);
ScoreComponentCollection scoreChange;
scoreChange = detour.GetReplacementHypo().GetScoreBreakdown();
scoreChange.MinusEquals(detour.GetSubstitutedNode().GetHypothesis().GetScoreBreakdown());
diff --git a/moses/src/ConfusionNet.cpp b/moses/src/ConfusionNet.cpp
index 911ca2836..8ff119497 100644
--- a/moses/src/ConfusionNet.cpp
+++ b/moses/src/ConfusionNet.cpp
@@ -247,7 +247,7 @@ ConfusionNet::CreateTranslationOptionCollection(const TranslationSystem* system)
size_t maxNoTransOptPerCoverage = StaticData::Instance().GetMaxNoTransOptPerCoverage();
float translationOptionThreshold = StaticData::Instance().GetTranslationOptionThreshold();
TranslationOptionCollection *rv= new TranslationOptionCollectionConfusionNet(system, *this, maxNoTransOptPerCoverage, translationOptionThreshold);
- assert(rv);
+ CHECK(rv);
return rv;
}
diff --git a/moses/src/ConfusionNet.h b/moses/src/ConfusionNet.h
index 4bbb893de..25b5a021d 100644
--- a/moses/src/ConfusionNet.h
+++ b/moses/src/ConfusionNet.h
@@ -39,7 +39,7 @@ public:
}
const Column& GetColumn(size_t i) const {
- assert(i<data.size());
+ CHECK(i<data.size());
return data[i];
}
const Column& operator[](size_t i) const {
@@ -69,7 +69,7 @@ public:
TranslationOptionCollection* CreateTranslationOptionCollection(const TranslationSystem* system) const;
const NonTerminalSet &GetLabelSet(size_t /*startPos*/, size_t /*endPos*/) const {
- assert(false);
+ CHECK(false);
return *(new NonTerminalSet());
}
diff --git a/moses/src/DecodeGraph.h b/moses/src/DecodeGraph.h
index f899fbf0e..770cb9958 100644
--- a/moses/src/DecodeGraph.h
+++ b/moses/src/DecodeGraph.h
@@ -23,7 +23,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#ifndef moses_DecodeGraph_h
#define moses_DecodeGraph_h
-#include <cassert>
+#include "util/check.hh"
#include <list>
#include <iterator>
#include "TypeDef.h"
@@ -78,7 +78,7 @@ public:
}
size_t GetMaxChartSpan() const {
- assert(m_maxChartSpan != NOT_FOUND);
+ CHECK(m_maxChartSpan != NOT_FOUND);
return m_maxChartSpan;
}
diff --git a/moses/src/DecodeStep.h b/moses/src/DecodeStep.h
index 4bf310fd9..4d2f2280b 100644
--- a/moses/src/DecodeStep.h
+++ b/moses/src/DecodeStep.h
@@ -22,7 +22,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#ifndef moses_DecodeStep_h
#define moses_DecodeStep_h
-#include <cassert>
+#include "util/check.hh"
#include "TypeDef.h"
#include "Dictionary.h"
diff --git a/moses/src/DecodeStepGeneration.cpp b/moses/src/DecodeStepGeneration.cpp
index 94427bff7..0f0c5c2dc 100644
--- a/moses/src/DecodeStepGeneration.cpp
+++ b/moses/src/DecodeStepGeneration.cpp
@@ -149,7 +149,7 @@ void DecodeStepGeneration::Process(const TranslationSystem* system
}
// merge with existing trans opt
- Phrase genPhrase(Output, mergeWords);
+ Phrase genPhrase( mergeWords);
TranslationOption *newTransOpt = MergeGeneration(inputPartialTranslOpt, genPhrase, generationScore);
if (newTransOpt != NULL) {
outputPartialTranslOptColl.Add(system, newTransOpt);
diff --git a/moses/src/DotChartInMemory.h b/moses/src/DotChartInMemory.h
index 2854a5d0a..fe7ab64c5 100644
--- a/moses/src/DotChartInMemory.h
+++ b/moses/src/DotChartInMemory.h
@@ -26,7 +26,7 @@
#include "DotChart.h"
#include "PhraseDictionaryNodeSCFG.h"
-#include <cassert>
+#include "util/check.hh"
#include <vector>
namespace Moses
@@ -96,7 +96,7 @@ public:
}
void Add(size_t pos, const DottedRuleInMemory *dottedRule) {
- assert(dottedRule);
+ CHECK(dottedRule);
m_coll[pos].push_back(dottedRule);
if (!dottedRule->GetLastNode().IsLeaf()) {
m_expandableDottedRuleList.push_back(dottedRule);
diff --git a/moses/src/DotChartOnDisk.cpp b/moses/src/DotChartOnDisk.cpp
index 5144cfc7c..1674f0e21 100644
--- a/moses/src/DotChartOnDisk.cpp
+++ b/moses/src/DotChartOnDisk.cpp
@@ -20,7 +20,7 @@
#include <algorithm>
#include "DotChartOnDisk.h"
#include "Util.h"
-#include "../../OnDiskPt/src/PhraseNode.h"
+#include "../../OnDiskPt/PhraseNode.h"
using namespace std;
diff --git a/moses/src/DotChartOnDisk.h b/moses/src/DotChartOnDisk.h
index ddf20c2f1..4473b7685 100644
--- a/moses/src/DotChartOnDisk.h
+++ b/moses/src/DotChartOnDisk.h
@@ -20,7 +20,7 @@
#pragma once
#include <vector>
-#include <cassert>
+#include "util/check.hh"
#include "DotChart.h"
@@ -108,7 +108,7 @@ class SavedNodeOnDisk
public:
SavedNodeOnDisk(const DottedRuleOnDisk *dottedRule)
:m_dottedRule(dottedRule) {
- assert(m_dottedRule);
+ CHECK(m_dottedRule);
}
~SavedNodeOnDisk() {
@@ -164,7 +164,7 @@ public:
}
void Add(size_t pos, const DottedRuleOnDisk *dottedRule) {
- assert(dottedRule);
+ CHECK(dottedRule);
m_coll[pos]->Add(dottedRule);
m_savedNode.push_back(new SavedNodeOnDisk(dottedRule));
diff --git a/moses/src/DummyScoreProducers.cpp b/moses/src/DummyScoreProducers.cpp
index fc8f670aa..5191fd75b 100644
--- a/moses/src/DummyScoreProducers.cpp
+++ b/moses/src/DummyScoreProducers.cpp
@@ -1,6 +1,6 @@
// $Id$
-#include <cassert>
+#include "util/check.hh"
#include "FFState.h"
#include "StaticData.h"
#include "DummyScoreProducers.h"
diff --git a/moses/src/DummyScoreProducers.h b/moses/src/DummyScoreProducers.h
index f399f1165..058e8f22d 100644
--- a/moses/src/DummyScoreProducers.h
+++ b/moses/src/DummyScoreProducers.h
@@ -32,7 +32,7 @@ public:
const ChartHypothesis&,
int /* featureID */,
ScoreComponentCollection*) const {
- assert(0); // feature function not valid in chart decoder
+ CHECK(0); // feature function not valid in chart decoder
return NULL;
}
};
diff --git a/moses/src/DynSAInclude/RandLMCache.h b/moses/src/DynSAInclude/RandLMCache.h
index 3f38cae02..3f38cae02 100755..100644
--- a/moses/src/DynSAInclude/RandLMCache.h
+++ b/moses/src/DynSAInclude/RandLMCache.h
diff --git a/moses/src/DynSAInclude/RandLMFilter.h b/moses/src/DynSAInclude/RandLMFilter.h
index fe64e23aa..556bbe44a 100755..100644
--- a/moses/src/DynSAInclude/RandLMFilter.h
+++ b/moses/src/DynSAInclude/RandLMFilter.h
@@ -37,15 +37,15 @@ namespace randlm {
// number of bits in T
cell_width_ = sizeof(T) << 3;
// current implementation has following constraints
- assert(cell_width_ > 0 && cell_width_ <= 64 && cell_width_ >= width);
+ CHECK(cell_width_ > 0 && cell_width_ <= 64 && cell_width_ >= width);
// used for >> division
log_cell_width_ = static_cast<int>(floor(log(cell_width_)/log(2) + 0.000001));
// size of underlying data in Ts
cells_ = ((addresses * width) + cell_width_ - 1) >> log_cell_width_;
// instantiate underlying data
data_ = new T[cells_];
- assert(data_ != NULL);
- assert(reset());
+ CHECK(data_ != NULL);
+ CHECK(reset());
// 'first_bit' marks the first bit used by 'address' (left padded with zeros).
first_bit_ = (width % cell_width_ == 0) ? 0 : cell_width_ - (width % cell_width_);
// mask for full cell
@@ -54,9 +54,9 @@ namespace randlm {
address_mask_ = full_mask_ >> first_bit_;
}
Filter(FileHandler* fin, bool loaddata = true) : data_(NULL) {
- assert(loadHeader(fin));
+ CHECK(loadHeader(fin));
if (loaddata)
- assert(loadData(fin));
+ CHECK(loadData(fin));
}
virtual ~Filter() {
delete[] data_;
@@ -72,7 +72,7 @@ namespace randlm {
}
// read / write functions
inline bool read(uint64_t address, T* value) {
- assert(address <= addresses_);
+ CHECK(address <= addresses_);
// copy address to 'value'
uint64_t data_bit = address * width_;
uint32_t data_cell = (data_bit >> log_cell_width_); // % cells_;
@@ -94,7 +94,7 @@ namespace randlm {
return true;
}
inline T read(uint64_t address) {
- assert(address <= addresses_);
+ CHECK(address <= addresses_);
// return value at address
T value = 0;
uint64_t data_bit = address * width_;
@@ -116,8 +116,8 @@ namespace randlm {
return value;
}
inline bool write(uint64_t address, T value) {
- assert(address <= addresses_);
- assert(log2(value) <= width_);
+ CHECK(address <= addresses_);
+ CHECK(log2(value) <= width_);
// write 'value' to address
uint64_t data_bit = address * width_;
uint32_t data_cell = (data_bit >> log_cell_width_); // % cells_;
@@ -207,50 +207,50 @@ namespace randlm {
int getCellWidth() { return cell_width_; }
uint32_t getCells() { return cells_; }
virtual bool save(FileHandler* out) {
- assert(out != NULL);
- assert(out->write((char*)&cells_, sizeof(cells_)));
- assert(out->write((char*)&cell_width_, sizeof(cell_width_)));
- assert(out->write((char*)&log_cell_width_, sizeof(log_cell_width_)));
- assert(out->write((char*)&addresses_, sizeof(addresses_)));
- assert(out->write((char*)&width_, sizeof(width_)));
- assert(out->write((char*)&first_bit_, sizeof(first_bit_)));
- assert(out->write((char*)&full_mask_, sizeof(full_mask_)));
- assert(out->write((char*)&address_mask_, sizeof(address_mask_)));
- //assert(out->write((char*)data_, cells_ * sizeof(T)));
+ CHECK(out != NULL);
+ CHECK(out->write((char*)&cells_, sizeof(cells_)));
+ CHECK(out->write((char*)&cell_width_, sizeof(cell_width_)));
+ CHECK(out->write((char*)&log_cell_width_, sizeof(log_cell_width_)));
+ CHECK(out->write((char*)&addresses_, sizeof(addresses_)));
+ CHECK(out->write((char*)&width_, sizeof(width_)));
+ CHECK(out->write((char*)&first_bit_, sizeof(first_bit_)));
+ CHECK(out->write((char*)&full_mask_, sizeof(full_mask_)));
+ CHECK(out->write((char*)&address_mask_, sizeof(address_mask_)));
+ //CHECK(out->write((char*)data_, cells_ * sizeof(T)));
const uint64_t jump = 524288032ul; //(uint64_t)pow(2, 29);
if((width_ == 1) || cells_ < jump)
- assert(out->write((char*)data_, cells_ * sizeof(T)));
+ CHECK(out->write((char*)data_, cells_ * sizeof(T)));
else {
uint64_t idx(0);
while(idx + jump < cells_) {
- assert(out->write((char*)&data_[idx], jump * sizeof(T)));
+ CHECK(out->write((char*)&data_[idx], jump * sizeof(T)));
idx += jump;
}
- assert(out->write((char*)&data_[idx], (cells_ - idx) * sizeof(T)));
+ CHECK(out->write((char*)&data_[idx], (cells_ - idx) * sizeof(T)));
}
return true;
}
protected:
bool loadHeader(FileHandler* fin) {
- assert(fin != NULL);
- assert(fin->read((char*)&cells_, sizeof(cells_)));
- assert(fin->read((char*)&cell_width_, sizeof(cell_width_)));
- assert(cell_width_ == sizeof(T) << 3); // make sure correct underlying data type
- assert(fin->read((char*)&log_cell_width_, sizeof(log_cell_width_)));
- assert(fin->read((char*)&addresses_, sizeof(addresses_)));
- assert(fin->read((char*)&width_, sizeof(width_)));
- assert(fin->read((char*)&first_bit_, sizeof(first_bit_)));
- assert(fin->read((char*)&full_mask_, sizeof(full_mask_)));
- assert(fin->read((char*)&address_mask_, sizeof(address_mask_)));
+ CHECK(fin != NULL);
+ CHECK(fin->read((char*)&cells_, sizeof(cells_)));
+ CHECK(fin->read((char*)&cell_width_, sizeof(cell_width_)));
+ CHECK(cell_width_ == sizeof(T) << 3); // make sure correct underlying data type
+ CHECK(fin->read((char*)&log_cell_width_, sizeof(log_cell_width_)));
+ CHECK(fin->read((char*)&addresses_, sizeof(addresses_)));
+ CHECK(fin->read((char*)&width_, sizeof(width_)));
+ CHECK(fin->read((char*)&first_bit_, sizeof(first_bit_)));
+ CHECK(fin->read((char*)&full_mask_, sizeof(full_mask_)));
+ CHECK(fin->read((char*)&address_mask_, sizeof(address_mask_)));
return true;
}
bool loadData(FileHandler* fin) {
// instantiate underlying array
data_ = new T[cells_];
- assert(data_ != NULL);
- assert(fin->read((char*)data_, cells_ * sizeof(T)));
- //assert(fin->read((char*)&data_[0], ceil(float(cells_) / 2.0) * sizeof(T)));
- //assert(fin->read((char*)&data_[cells_ / 2], (cells_ / 2) * sizeof(T)));
+ CHECK(data_ != NULL);
+ CHECK(fin->read((char*)data_, cells_ * sizeof(T)));
+ //CHECK(fin->read((char*)&data_[0], ceil(float(cells_) / 2.0) * sizeof(T)));
+ //CHECK(fin->read((char*)&data_[cells_ / 2], (cells_ / 2) * sizeof(T)));
return true;
}
uint64_t cells_; // number T making up 'data_'
@@ -271,7 +271,7 @@ namespace randlm {
BitFilter(FileHandler* fin, bool loaddata = true)
: Filter<uint8_t>(fin, loaddata) {
if (loaddata)
- assert(load(fin));
+ CHECK(load(fin));
}
// TODO: overload operator[]
virtual bool testBit(uint64_t location) {
@@ -289,7 +289,7 @@ namespace randlm {
return true;
}
bool save(FileHandler* fout) {
- assert(Filter<uint8_t>::save(fout));
+ CHECK(Filter<uint8_t>::save(fout));
std::cerr << "Saved BitFilter. Rho = " << rho() << "." << std::endl;;
return true;
}
@@ -316,10 +316,10 @@ namespace randlm {
class ResizedBitFilter : public BitFilter {
public:
ResizedBitFilter(FileHandler* fin) : BitFilter(fin) {
- assert(load(fin));
+ CHECK(load(fin));
}
ResizedBitFilter(FileHandler* fin, uint64_t newsize) : BitFilter(newsize) {
- assert(resizeFromFile(fin, newsize));
+ CHECK(resizeFromFile(fin, newsize));
}
bool resizeFromFile(FileHandler* oldin, uint64_t newsize);
virtual bool testBit(uint64_t location) {
@@ -332,18 +332,18 @@ namespace randlm {
}
bool save(FileHandler* fout) {
// re-hashing parameters
- assert(BitFilter::save(fout));
+ CHECK(BitFilter::save(fout));
std::cerr << "Saved ResizedBitFilter. Rho = " << rho() << "." << std::endl;
- assert(fout->write((char*)&old_addresses_, sizeof(old_addresses_)));
- assert(fout->write((char*)&a_, sizeof(a_)));
+ CHECK(fout->write((char*)&old_addresses_, sizeof(old_addresses_)));
+ CHECK(fout->write((char*)&a_, sizeof(a_)));
return fout->write((char*)&b_, sizeof(b_));
}
protected:
bool load(FileHandler* fin) {
// re-hashing parameters
std::cerr << "Loaded ResizedBitFilter. Rho = " << rho() << "." << std::endl;
- assert(fin->read((char*)&old_addresses_, sizeof(old_addresses_)));
- assert(fin->read((char*)&a_, sizeof(a_)));
+ CHECK(fin->read((char*)&old_addresses_, sizeof(old_addresses_)));
+ CHECK(fin->read((char*)&a_, sizeof(a_)));
return fin->read((char*)&b_, sizeof(b_));
}
// member data
@@ -360,7 +360,7 @@ namespace randlm {
CountingFilter(uint64_t addresses, int width, bool wrap_around = true) :
Filter<T>(addresses, width), wrap_around_(wrap_around) {}
CountingFilter(FileHandler* fin) : Filter<T>(fin, true) {
- assert(load(fin));
+ CHECK(load(fin));
}
~CountingFilter() {}
// increment this address by one
@@ -384,11 +384,11 @@ namespace randlm {
return true;
// wrapped around, so check whether we need to reset to the max count
if (!wrap_around_)
- assert(this->write(address, this->address_mask_));
+ CHECK(this->write(address, this->address_mask_));
return false; // false to indicate that overflowed
}
bool save(FileHandler* fout) {
- assert(Filter<T>::save(fout));
+ CHECK(Filter<T>::save(fout));
return fout->write((char*)&wrap_around_, sizeof(wrap_around_));
}
private:
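
The hunks above, and most of the remaining files in this patch, replace assert with CHECK from util/check.hh. That header is not part of this diff; a minimal sketch of an always-on check of this kind (an assumption for illustration, not the actual util/check.hh) might look like:

#include <cstdlib>
#include <iostream>

// Unlike assert, this is not compiled out under NDEBUG, so calls with side
// effects wrapped in it, such as fin->read(...) and fout->write(...) above,
// still execute in release builds.
#define CHECK(cond)                                                    \
  do {                                                                 \
    if (!(cond)) {                                                     \
      std::cerr << "Check failed: " #cond " at " << __FILE__ << ":"    \
                << __LINE__ << std::endl;                              \
      std::abort();                                                    \
    }                                                                  \
  } while (0)

That side effect is the practical point of the change: an assert around a read or write silently drops the call in NDEBUG builds, while a CHECK-style macro keeps the call and only adds the verification.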
diff --git a/moses/src/DynSAInclude/file.cpp b/moses/src/DynSAInclude/file.cpp
index 70c45ca77..d2901c1ae 100644
--- a/moses/src/DynSAInclude/file.cpp
+++ b/moses/src/DynSAInclude/file.cpp
@@ -24,7 +24,7 @@ FileHandler::FileHandler(const std::string & path, std::ios_base::openmode flags
exit(EXIT_FAILURE);
} else {
bool ret = setStreamBuffer(flags & std::ios::in);
- assert(ret);
+ CHECK(ret);
}
this->precision(32);
}
@@ -59,11 +59,11 @@ bool FileHandler::setStreamBuffer(bool checkExists)
{
// redirect stdin or stdout if necessary
if (path_ == FileHandler::kStdInDescriptor) {
- assert(flags_ & std::ios::in);
+ CHECK(flags_ & std::ios::in);
std::streambuf* sb = std::cin.rdbuf();
buffer_ = sb;
} else if (path_ == FileHandler::kStdOutDescriptor) {
- assert(flags_ & std::ios::out);
+ CHECK(flags_ & std::ios::out);
std::streambuf* sb = std::cout.rdbuf();
buffer_ = sb;
} else {
diff --git a/moses/src/DynSAInclude/file.h b/moses/src/DynSAInclude/file.h
index 97ea6cb52..3157f918b 100644
--- a/moses/src/DynSAInclude/file.h
+++ b/moses/src/DynSAInclude/file.h
@@ -7,7 +7,7 @@
#include <cstdlib>
#include <sys/stat.h>
#include <string>
-#include <cassert>
+#include "util/check.hh"
#include "fdstream.h"
#include "utils.h"
diff --git a/moses/src/DynSAInclude/hash.h b/moses/src/DynSAInclude/hash.h
index f0965d9bd..233d0be5d 100755..100644
--- a/moses/src/DynSAInclude/hash.h
+++ b/moses/src/DynSAInclude/hash.h
@@ -1,7 +1,7 @@
#ifndef INC_ALLHASHFUNCS_H
#define INC_ALLHASHFUNCS_H
-#include <cassert>
+#include "util/check.hh"
#include <cmath>
#include "types.h"
#include "utils.h"
@@ -28,12 +28,12 @@ class HashBase {
virtual T hash(const wordID_t* id, const int len, count_t h)=0; // vocab mapped hashing
count_t size() { return H_;}
virtual void save(FileHandler* fout) {
- assert(fout != 0);
+ CHECK(fout != 0);
fout->write((char*)&m_, sizeof(m_));
fout->write((char*)&H_, sizeof(H_));
}
virtual void load(FileHandler* fin) {
- assert(fin != 0);
+ CHECK(fin != 0);
fin->read((char*)&m_, sizeof(m_));
fin->read((char*)&H_, sizeof(H_));
}
@@ -43,7 +43,7 @@ class UnivHash_linear: public HashBase<T> {
public:
UnivHash_linear(float m, count_t H, P pr):
HashBase<T>(m, H), pr_(pr) {
- //assert(isPrime(pr_));
+ //CHECK(isPrime(pr_));
initSeeds();
}
UnivHash_linear(FileHandler* fin):
@@ -177,7 +177,7 @@ T UnivHash_tableXOR<T>::hash(const char* s, count_t h = 0) {
unsigned char c;
while((c = *s++) && (++pos < MAX_STR_LEN))
value ^= table_[h][idx += c];
- assert(value < this->m_);
+ CHECK(value < this->m_);
return value;
}
@@ -265,7 +265,7 @@ void UnivHash_linear<T>::freeSeeds() {
template <typename T>
inline T UnivHash_linear<T>::hash(const wordID_t* id, const int len,
count_t h=0) {
- assert(h < this->H_);
+ CHECK(h < this->H_);
T value = 0;
int pos(0);
while(pos < len) {
@@ -277,7 +277,7 @@ inline T UnivHash_linear<T>::hash(const wordID_t* id, const int len,
template <typename T>
inline T UnivHash_linear<T>::hash(const wordID_t id, const count_t pos,
const T prevValue, count_t h=0) {
- assert(h < this->H_);
+ CHECK(h < this->H_);
T value = prevValue + ((a_[h][pos] * id) + b_[h][pos]); // % pr_;
return value % this->m_;
}
@@ -315,7 +315,7 @@ void UnivHash_linear<T>::load(FileHandler* fin) {
/*
template <typename T>
T UnivHash_linear<T>::hash(const char* s, count_t h=0) {
- assert(h < this->H_);
+ CHECK(h < this->H_);
T value = 0;
int pos(0);
unsigned char c;
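
The UnivHash_linear::hash overloads above accumulate a_[h][pos] * id + b_[h][pos] over the word IDs and reduce modulo m_. A self-contained sketch of that multiply-add universal hashing scheme (illustrative names only; the real class additionally keeps H_ independently seeded functions and its seed tables):

#include <cstddef>
#include <cstdint>
#include <vector>

// h(x) = (sum_i a_i * x_i + b_i) mod m, with a_i and b_i fixed random seeds.
uint64_t univ_hash(const std::vector<uint32_t>& ids,
                   const std::vector<uint64_t>& a,
                   const std::vector<uint64_t>& b,
                   uint64_t m) {
  uint64_t value = 0;
  for (std::size_t pos = 0; pos < ids.size(); ++pos)
    value += a[pos] * ids[pos] + b[pos];  // per-position linear term
  return value % m;                       // reduce into the table range [0, m)
}

Drawing a fresh (a, b) seed pair per hash function is what lets the filter and perfect-hash code elsewhere in this patch use several independent hashes of the same n-gram.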
diff --git a/moses/src/DynSAInclude/onlineRLM.h b/moses/src/DynSAInclude/onlineRLM.h
index dd81afa46..adc7934fc 100755..100644
--- a/moses/src/DynSAInclude/onlineRLM.h
+++ b/moses/src/DynSAInclude/onlineRLM.h
@@ -21,7 +21,7 @@ public:
OnlineRLM(uint16_t MBs, int width, int bucketRange, count_t order,
Vocab* v, float qBase = 8): PerfectHash<T>(MBs, width, bucketRange, qBase),
vocab_(v), bAdapting_(false), order_(order), corpusSize_(0), alpha_(0) {
- assert(vocab_ != 0);
+ CHECK(vocab_ != 0);
//instantiate quantizer class here
cache_ = new Cache<float>(8888.8888, 9999.9999); // unknown_value, null_value
alpha_ = new float[order_ + 1];
@@ -137,7 +137,7 @@ int OnlineRLM<T>::query(const wordID_t* IDs, int len) {
value -= ((value & this->hitMask_) != 0) ? this->hitMask_ : 0; // check for previous hit marks
}
else {
- assert(filterIdx < this->cells_);
+ CHECK(filterIdx < this->cells_);
//markQueried(filterIdx);
}
}
@@ -158,12 +158,12 @@ bool OnlineRLM<T>::markPrefix(const wordID_t* IDs, const int len, bool bSet) {
return false;
}
if(filterIndex != this->cells_ + 1) {
- assert(hpdItr == this->dict_.end());
+ CHECK(hpdItr == this->dict_.end());
if(bSet) bPrefix_->setBit(filterIndex); // mark index
else bPrefix_->clearBit(filterIndex); // unset index
}
else {
- assert(filterIndex == this->cells_ + 1);
+ CHECK(filterIndex == this->cells_ + 1);
//how to handle hpd prefixes?
}
if(pfCache.nodes() > 10000) pfCache.clear();
@@ -289,14 +289,14 @@ float OnlineRLM<T>::getProb(const wordID_t* ngram, int len,
logprob = alpha_[len] + oovprob;
break;
case 1: // unigram found only
- assert(in[len - 1] > 0);
+ CHECK(in[len - 1] > 0);
logprob = alpha_[len - 1] + (corpusSize_ > 0 ?
log10(static_cast<float>(in[len - 1]) / static_cast<float>(corpusSize_)) : 0);
//logprob = alpha_[len - 1] +
//log10(static_cast<float>(in[len - 1]) / static_cast<float>(corpusSize_));
break;
default:
- assert(den_val > 0);
+ CHECK(den_val > 0);
//if(subgram == in[len - found]) ++subgram; // avoid returning zero probs????
logprob = alpha_[len - num_fnd] +
log10(static_cast<float>(in[len - num_fnd]) / static_cast<float>(den_val));
@@ -313,7 +313,7 @@ template<typename T>
const void* OnlineRLM<T>::getContext(const wordID_t* ngram, int len) {
int dummy(0);
float* addresses[len]; // only interested in addresses of cache
- assert(cache_->getCache2(ngram, len, &addresses[0], &dummy) == len);
+ CHECK(cache_->getCache2(ngram, len, &addresses[0], &dummy) == len);
// return address of cache node
return (const void*)addresses[0];
}
@@ -391,7 +391,7 @@ void OnlineRLM<T>::load(FileHandler* fin) {
cerr << "Loading ORLM...\n";
// load vocab first
vocab_ = new Vocab(fin);
- assert(vocab_ != 0);
+ CHECK(vocab_ != 0);
fin->read((char*)&corpusSize_, sizeof(corpusSize_));
cerr << "\tCorpus size = " << corpusSize_ << endl;
fin->read((char*)&order_, sizeof(order_));
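
OnlineRLM<T>::getProb above scores an n-gram from counts: the longest matched suffix contributes log10(count / history count), and alpha_ accumulates a penalty for each word that had to be backed off. A rough sketch of that count-based backoff idea (hypothetical signature; the real ORLM's quantisation, caching and special cases are omitted):

#include <cmath>
#include <vector>

// logprob = alpha[dropped] + log10(count(matched suffix) / count(its history)),
// falling back to an OOV cost when nothing usable matched.
double backoff_logprob(double matched_count, double history_count,
                       int words_dropped, const std::vector<double>& alpha,
                       double oov_logprob) {
  if (matched_count <= 0.0 || history_count <= 0.0)
    return alpha[words_dropped] + oov_logprob;
  return alpha[words_dropped] + std::log10(matched_count / history_count);
}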
diff --git a/moses/src/DynSAInclude/params.cpp b/moses/src/DynSAInclude/params.cpp
index c37fcd684..4be3a1676 100755..100644
--- a/moses/src/DynSAInclude/params.cpp
+++ b/moses/src/DynSAInclude/params.cpp
@@ -59,9 +59,9 @@ bool Parameters::loadParams(int argc, char ** argv) {
// if the parameter is of type bool there is no corresponding value
if( getValueType(param) == kBoolValue ) {
jumpBy = 1;
- assert(setParamValue(param, kTrueValue));
+ CHECK(setParamValue(param, kTrueValue));
} else { //not of type bool so must have corresponding value
- assert(i+1 < argc);
+ CHECK(i+1 < argc);
jumpBy = 2;
std::string val = argv[i+1];
Utils::trim(val);
diff --git a/moses/src/DynSAInclude/params.h b/moses/src/DynSAInclude/params.h
index 33930e536..e0aab6135 100755..100644
--- a/moses/src/DynSAInclude/params.h
+++ b/moses/src/DynSAInclude/params.h
@@ -5,7 +5,7 @@
#include <map>
#include <set>
#include <vector>
-#include <cassert>
+#include "util/check.hh"
#include "file.h"
#include "utils.h"
#include "types.h"
diff --git a/moses/src/DynSAInclude/perfectHash.h b/moses/src/DynSAInclude/perfectHash.h
index 2ee2ce156..2e3bfbc8f 100755..100644
--- a/moses/src/DynSAInclude/perfectHash.h
+++ b/moses/src/DynSAInclude/perfectHash.h
@@ -22,7 +22,7 @@ class PerfectHash {
public:
PerfectHash(uint16_t MBs, int width, int bucketRange, float qBase);
PerfectHash(FileHandler* fin) {
- assert(fin != 0);
+ CHECK(fin != 0);
}
virtual ~PerfectHash();
void analyze();
@@ -116,7 +116,7 @@ uint64_t PerfectHash<T>::insert(const wordID_t* IDs, const int len,
}
++index;
}
- assert((emptyidx < index) && (filter_->read(emptyidx) == 0)); // should have found empty index if it gets here
+ CHECK((emptyidx < index) && (filter_->read(emptyidx) == 0)); // should have found empty index if it gets here
T code = (T)qtizer_->code(value);
filter_->write(emptyidx, fp); // insert the fprint
values_->write(emptyidx, code);
@@ -214,8 +214,8 @@ void PerfectHash<T>::remove(const wordID_t* IDs, const int len) {
}
template<typename T> // clear filter index
void PerfectHash<T>::remove(uint64_t index) {
- assert(index < cells_);
- assert(filter_->read(index) != 0); // slow
+ CHECK(index < cells_);
+ CHECK(filter_->read(index) != 0); // slow
filter_->write(index, 0);
values_->write(index, 0);
//reduce bucket size
@@ -255,7 +255,7 @@ count_t PerfectHash<T>::bucketsMemUse() {
}
template<typename T>
void PerfectHash<T>::save(FileHandler* fout) {
- assert(fout != 0);
+ CHECK(fout != 0);
cerr << "\tSaving perfect hash parameters...\n";
fout->write((char*)&hitMask_, sizeof(hitMask_));
fout->write((char*)&memBound_, sizeof(memBound_));
@@ -280,7 +280,7 @@ void PerfectHash<T>::save(FileHandler* fout) {
}
template<typename T>
void PerfectHash<T>::load(FileHandler* fin) {
- assert(fin != 0);
+ CHECK(fin != 0);
cerr << "\tLoading perfect hash parameters...\n";
fin->read((char*)&hitMask_, sizeof(hitMask_));
fin->read((char*)&memBound_, sizeof(memBound_));
diff --git a/moses/src/DynSAInclude/quantizer.h b/moses/src/DynSAInclude/quantizer.h
index e452716e4..c12189615 100755..100644
--- a/moses/src/DynSAInclude/quantizer.h
+++ b/moses/src/DynSAInclude/quantizer.h
@@ -3,7 +3,7 @@
#include <vector>
#include <cmath>
-#include <cassert>
+#include "util/check.hh"
#include <algorithm>
#include "types.h"
@@ -11,7 +11,7 @@ static const float kFloatErr = 0.00001f;
class LogQtizer {
public:
LogQtizer(float i): base_(pow(2, 1 / i)) {
- assert(base_ > 1);
+ CHECK(base_ > 1);
max_code_ = 0;
float value = 1; // code = 1 -> value = 1 for any base
std::vector<float> code_to_value_vec;
@@ -34,12 +34,12 @@ public:
std::cerr << "Initialized quantization (size = " << max_code_ + 1 << ")" << std::endl;
}
LogQtizer(FileHandler* fin) {
- assert(fin != NULL);
+ CHECK(fin != NULL);
load(fin);
}
int code(float value) {
// should just be: return log_b(value)
- assert(!(value < min_value_ || value > max_value_));
+ CHECK(!(value < min_value_ || value > max_value_));
// but binary search removes errors due to floor operator above
int code = static_cast<int>(std::lower_bound(code_to_value_, code_to_value_+ max_code_,
value) - code_to_value_);
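
LogQtizer above stores float values as small integer codes on a logarithmic scale with base 2^(1/i), so adjacent codes differ by a constant ratio; code() does a binary search over the precomputed table to avoid floating-point floor errors. The underlying mapping, sketched with hypothetical helpers rather than the class's actual interface:

#include <cmath>

// value -> code is essentially floor(log_base(value)); code -> value is base^code.
int log_code(float value, float base) {
  return static_cast<int>(std::floor(std::log(value) / std::log(base)));
}

float log_decode(int code, float base) {
  return std::pow(base, static_cast<float>(code));
}

With base = pow(2, 1/i) as in the constructor, i codes span each doubling of the value range, so a larger i buys finer resolution at the cost of more distinct codes to store.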
diff --git a/moses/src/DynSAInclude/vocab.cpp b/moses/src/DynSAInclude/vocab.cpp
index f0825f5a4..27e052260 100644
--- a/moses/src/DynSAInclude/vocab.cpp
+++ b/moses/src/DynSAInclude/vocab.cpp
@@ -118,7 +118,7 @@ bool Vocab::Load(FileHandler* vcbin, const FactorDirection& direction,
wordID_t id;
void *ret = getline(*vcbin, line);
- assert(ret);
+ CHECK(ret);
std::istringstream first(line.c_str());
uint32_t vcbsize(0);
first >> vcbsize;
@@ -132,7 +132,7 @@ bool Vocab::Load(FileHandler* vcbin, const FactorDirection& direction,
// may be no id (i.e. file may just be a word list)
if (id == 0 && word != GetkOOVWord())
id = m_ids2words.size() + 1; // assign ids sequentially starting from 1
- assert(m_ids2words.count(id) == 0 && m_words2ids.count(word) == 0);
+ CHECK(m_ids2words.count(id) == 0 && m_words2ids.count(word) == 0);
m_ids2words[id] = word;
m_words2ids[word] = id;
}
diff --git a/moses/src/DynSAInclude/vocab.h b/moses/src/DynSAInclude/vocab.h
index cb2d3dac1..467d16fdb 100644
--- a/moses/src/DynSAInclude/vocab.h
+++ b/moses/src/DynSAInclude/vocab.h
@@ -39,7 +39,7 @@ public:
m_kBOSWordID(1) {
InitSpecialWords();
bool ret = Load(vocab_path, direction, factors, closed);
- assert(ret);
+ CHECK(ret);
}
Vocab(FileHandler * fin, const FactorDirection& direction,
const FactorList& factors, bool closed = true):
@@ -47,7 +47,7 @@ public:
m_kBOSWordID(1) {
InitSpecialWords();
bool ret = Load(fin, direction, factors, closed);
- assert(ret);
+ CHECK(ret);
}
Vocab(FileHandler *fin):
m_kOOVWordID(0),
diff --git a/moses/src/DynSuffixArray.cpp b/moses/src/DynSuffixArray.cpp
index 0b446cc1d..f043f5c9b 100644
--- a/moses/src/DynSuffixArray.cpp
+++ b/moses/src/DynSuffixArray.cpp
@@ -109,7 +109,7 @@ void DynSuffixArray::Insert(vuint_t* newSent, unsigned newIndex)
//stage 3...all words of new sentence are inserted backwards
// stage 2: k=ISA[newIndex], tmp= L[k], L[k] = newChar
//PrintAuxArrays();
- assert(newIndex <= m_SA->size());
+ CHECK(newIndex <= m_SA->size());
int k(-1), kprime(-1);
k = (newIndex < m_SA->size() ? m_ISA->at(newIndex) : m_ISA->at(0)); // k is now index of the cycle that starts at newindex
int true_pos = LastFirstFunc(k); // track cycle shift (newIndex - 1)
@@ -161,7 +161,7 @@ void DynSuffixArray::Reorder(unsigned j, unsigned jprime)
//cerr << "j=" << j << "\tj'=" << jprime << endl;
int isaIdx(-1);
int new_j = LastFirstFunc(j);
- assert(j <= jprime);
+ CHECK(j <= jprime);
// for SA and L, the element at pos j is moved to pos j'
m_L->insert(m_L->begin() + jprime + 1, m_L->at(j));
m_L->erase(m_L->begin() + j);
diff --git a/moses/src/FFState.h b/moses/src/FFState.h
index 3bd294e58..209be31b2 100644
--- a/moses/src/FFState.h
+++ b/moses/src/FFState.h
@@ -1,7 +1,7 @@
#ifndef moses_FFState_h
#define moses_FFState_h
-#include <cassert>
+#include "util/check.hh"
#include <vector>
diff --git a/moses/src/FeatureFunction.cpp b/moses/src/FeatureFunction.cpp
index 322d8de6e..ad9db5e1c 100644
--- a/moses/src/FeatureFunction.cpp
+++ b/moses/src/FeatureFunction.cpp
@@ -1,6 +1,6 @@
#include "FeatureFunction.h"
-#include <cassert>
+#include "util/check.hh"
namespace Moses
{
@@ -19,7 +19,7 @@ void StatelessFeatureFunction::Evaluate(
const TargetPhrase& /* cur_hypo */,
ScoreComponentCollection* /* accumulator */) const
{
- assert(!"Please implement Evaluate or set ComputeValueInTranslationOption to true");
+ CHECK(!"Please implement Evaluate or set ComputeValueInTranslationOption to true");
}
bool StatefulFeatureFunction::IsStateless() const
diff --git a/moses/src/File.h b/moses/src/File.h
index 47d25abd4..09d9ddc92 100644
--- a/moses/src/File.h
+++ b/moses/src/File.h
@@ -10,7 +10,7 @@
#include <cstdio>
#include <iostream>
#include <vector>
-#include <cassert>
+#include "util/check.hh"
#include "UserMessage.h"
#include "TypeDef.h"
#include "Util.h"
@@ -161,7 +161,7 @@ inline FILE* fOpen(const char* fn,const char* m)
return f;
else {
UserMessage::Add(std::string("ERROR: could not open file ") + fn + " with mode " + m + "\n");
- assert(false);
+ CHECK(false);
return NULL;
}
}
diff --git a/moses/src/FloydWarshall.cpp b/moses/src/FloydWarshall.cpp
index 9e495053b..e63de819b 100644
--- a/moses/src/FloydWarshall.cpp
+++ b/moses/src/FloydWarshall.cpp
@@ -1,4 +1,4 @@
-#include <cassert>
+#include "util/check.hh"
#include <climits>
#include <vector>
@@ -11,7 +11,7 @@ using namespace std;
// All-pairs shortest path algorithm
void floyd_warshall(const std::vector<std::vector<bool> >& edges, std::vector<std::vector<int> >& dist)
{
- assert(edges.size() == edges.front().size());
+ CHECK(edges.size() == edges.front().size());
dist.clear();
dist.resize(edges.size(), std::vector<int>(edges.size(), 0));
diff --git a/moses/src/Hypothesis.cpp b/moses/src/Hypothesis.cpp
index 6dd0db90c..86e2e791b 100644
--- a/moses/src/Hypothesis.cpp
+++ b/moses/src/Hypothesis.cpp
@@ -19,7 +19,7 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include <iostream>
#include <limits>
#include <vector>
@@ -100,7 +100,7 @@ Hypothesis::Hypothesis(const Hypothesis &prevHypo, const TranslationOption &tran
{
// assert that we are not extending our hypothesis by retranslating something
// that this hypothesis has already translated!
- assert(!m_sourceCompleted.Overlap(m_currSourceWordsRange));
+ CHECK(!m_sourceCompleted.Overlap(m_currSourceWordsRange));
//_hash_computed = false;
m_sourceCompleted.SetValue(m_currSourceWordsRange.GetStartPos(), m_currSourceWordsRange.GetEndPos(), true);
@@ -321,7 +321,7 @@ float Hypothesis::CalcExpectedScore( const SquareMatrix &futureScore )
t = clock(); // track time excluding LM
}
- assert(!"Need to add code to get the distortion scores");
+ CHECK(!"Need to add code to get the distortion scores");
//CalcDistortionScore();
// LANGUAGE MODEL ESTIMATE (includes word penalty cost)
@@ -345,7 +345,7 @@ void Hypothesis::CalcRemainingScore()
clock_t t=0; // used to track time
// LANGUAGE MODEL COST
- assert(!"Need to add code to get the LM score(s)");
+ CHECK(!"Need to add code to get the LM score(s)");
//CalcLMScore(staticData.GetAllLM());
IFVERBOSE(2) {
diff --git a/moses/src/Hypothesis.h b/moses/src/Hypothesis.h
index 326f1891f..fefd6e2fe 100644
--- a/moses/src/Hypothesis.h
+++ b/moses/src/Hypothesis.h
@@ -168,9 +168,6 @@ public:
std::string GetSourcePhraseStringRep(const std::vector<FactorType> factorsToPrint) const;
std::string GetTargetPhraseStringRep(const std::vector<FactorType> factorsToPrint) const;
- inline const TargetPhrase GetTargetPhrase() const {
- return m_targetPhrase;
- }
std::string GetSourcePhraseStringRep() const;
std::string GetTargetPhraseStringRep() const;
@@ -188,7 +185,7 @@ public:
const Hypothesis *hypo = this;
while (pos < hypo->GetCurrTargetWordsRange().GetStartPos()) {
hypo = hypo->GetPrevHypo();
- assert(hypo != NULL);
+ CHECK(hypo != NULL);
}
return hypo->GetCurrWord(pos - hypo->GetCurrTargetWordsRange().GetStartPos());
}
diff --git a/moses/src/HypothesisStackCubePruning.cpp b/moses/src/HypothesisStackCubePruning.cpp
index 91de8d393..ca54bf944 100644
--- a/moses/src/HypothesisStackCubePruning.cpp
+++ b/moses/src/HypothesisStackCubePruning.cpp
@@ -99,7 +99,7 @@ bool HypothesisStackCubePruning::AddPrune(Hypothesis *hypo)
// equiv hypo exists, recombine with other hypo
iterator &iterExisting = addRet.first;
Hypothesis *hypoExisting = *iterExisting;
- assert(iterExisting != m_hypos.end());
+ CHECK(iterExisting != m_hypos.end());
m_manager.GetSentenceStats().AddRecombination(*hypo, **iterExisting);
@@ -119,7 +119,7 @@ bool HypothesisStackCubePruning::AddPrune(Hypothesis *hypo)
if (!added) {
iterExisting = m_hypos.find(hypo);
TRACE_ERR("Offending hypo = " << **iterExisting << endl);
- assert(false);
+ CHECK(false);
}
return false;
} else {
@@ -137,7 +137,7 @@ bool HypothesisStackCubePruning::AddPrune(Hypothesis *hypo)
void HypothesisStackCubePruning::AddInitial(Hypothesis *hypo)
{
std::pair<iterator, bool> addRet = Add(hypo);
- assert (addRet.second);
+ CHECK(addRet.second);
const WordsBitmap &bitmap = hypo->GetWordsBitmap();
m_bitmapAccessor[bitmap] = new BitmapContainer(bitmap, *this);
diff --git a/moses/src/HypothesisStackNormal.cpp b/moses/src/HypothesisStackNormal.cpp
index 62393cde3..7d72f76aa 100644
--- a/moses/src/HypothesisStackNormal.cpp
+++ b/moses/src/HypothesisStackNormal.cpp
@@ -109,7 +109,7 @@ bool HypothesisStackNormal::AddPrune(Hypothesis *hypo)
// equiv hypo exists, recombine with other hypo
iterator &iterExisting = addRet.first;
Hypothesis *hypoExisting = *iterExisting;
- assert(iterExisting != m_hypos.end());
+ CHECK(iterExisting != m_hypos.end());
m_manager.GetSentenceStats().AddRecombination(*hypo, **iterExisting);
diff --git a/moses/src/Jamfile b/moses/src/Jamfile
new file mode 100644
index 000000000..f89fe4118
--- /dev/null
+++ b/moses/src/Jamfile
@@ -0,0 +1,17 @@
+alias headers : ../../util//kenutil : : : <include>. ;
+
+alias ThreadPool : ThreadPool.cpp ;
+
+if [ option.get "with-synlm" : no : yes ] = yes
+{
+ lib m ;
+ obj SyntacticLanguageModel.o : SyntacticLanguageModel.cpp headers : <include>$(TOP)/synlm/hhmm/rvtl/include <include>$(TOP)/synlm/hhmm/wsjparse/include ;
+ alias synlm : SyntacticLanguageModel.o m : : : <define>HAVE_SYNLM ;
+} else {
+ alias synlm ;
+}
+
+lib moses :
+#All cpp files except those listed
+[ glob *.cpp DynSAInclude/*.cpp : ThreadPool.cpp SyntacticLanguageModel.cpp ]
+synlm ThreadPool LM//LM headers ../..//z ../../OnDiskPt//OnDiskPt ;
diff --git a/moses/src/LM/Factory.cpp b/moses/src/LM/Factory.cpp
index 42385085b..5d877e9d6 100644
--- a/moses/src/LM/Factory.cpp
+++ b/moses/src/LM/Factory.cpp
@@ -26,6 +26,35 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "TypeDef.h"
#include "FactorCollection.h"
+/////////////////////////////////////////////////
+// for those using autoconf/automake
+#if HAVE_CONFIG_H
+#include "config.h"
+
+#define LM_REMOTE 1
+# ifdef HAVE_SRILM
+# define LM_SRI 1
+# else
+# undef LM_SRI
+# endif
+
+# ifdef HAVE_IRSTLM
+# define LM_IRST 1
+# endif
+
+# ifdef HAVE_RANDLM
+# define LM_RAND 1
+# endif
+
+# ifdef HAVE_ORLM
+# define LM_ORLM 1
+# endif
+
+# ifdef HAVE_DMAPLM
+# define LM_DMAP
+# endif
+#endif
+
// include appropriate header
#ifdef LM_SRI
# include "LM/SRI.h"
@@ -35,7 +64,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# include "LM/IRST.h"
#endif
#ifdef LM_RAND
-# include "LM/RandLM.h"
+# include "LM/Rand.h"
#endif
#ifdef LM_ORLM
# include "LM/ORLM.h"
@@ -43,9 +72,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#ifdef LM_REMOTE
# include "LM/Remote.h"
#endif
-#ifdef LM_KEN
-# include "LM/Ken.h"
-#endif
+#include "LM/Ken.h"
#ifdef LM_DMAP
# include "LM/DMapLM.h"
#endif
@@ -68,18 +95,13 @@ LanguageModel* CreateLanguageModel(LMImplementation lmImplementation
, int dub)
{
if (lmImplementation == Ken || lmImplementation == LazyKen) {
-#ifdef LM_KEN
return ConstructKenLM(languageModelFile, factorTypes[0], lmImplementation == LazyKen);
-#else
- UserMessage::Add("KenLM isn't compiled in but your config asked for it");
- return NULL;
-#endif
}
LanguageModelImplementation *lm = NULL;
switch (lmImplementation) {
case RandLM:
#ifdef LM_RAND
- lm = new LanguageModelRandLM();
+ lm = NewRandLM();
#endif
break;
case ORLM:
@@ -110,7 +132,7 @@ LanguageModel* CreateLanguageModel(LMImplementation lmImplementation
break;
case ParallelBackoff:
#ifdef LM_SRI
- lm = new LanguageModelParallelBackoff();
+ lm = NewParallelBackoff();
#endif
break;
case DMapLM:
diff --git a/moses/src/LM/IRST.cpp b/moses/src/LM/IRST.cpp
index 72a83c456..692385b4d 100644
--- a/moses/src/LM/IRST.cpp
+++ b/moses/src/LM/IRST.cpp
@@ -19,7 +19,6 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
#include <limits>
#include <iostream>
#include <fstream>
diff --git a/moses/src/LM/Implementation.cpp b/moses/src/LM/Implementation.cpp
index ad966387d..589ed375a 100644
--- a/moses/src/LM/Implementation.cpp
+++ b/moses/src/LM/Implementation.cpp
@@ -19,7 +19,7 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include <limits>
#include <iostream>
#include <memory>
@@ -95,12 +95,12 @@ void LanguageModelImplementation::CalcScore(const Phrase &phrase, float &fullSco
}
} else {
ShiftOrPush(contextFactor, word);
- assert(contextFactor.size() <= GetNGramOrder());
+ CHECK(contextFactor.size() <= GetNGramOrder());
if (word == GetSentenceStartArray()) {
// do nothing, don't include prob for <s> unigram
if (currPos != 0) {
- std::cerr << "Your data contains <s> in a position other than the first word." << std::endl;
+ std::cerr << "Either your data contains <s> in a position other than the first word or your language model is missing <s>. Did you build your ARPA using IRSTLM and forget to run add-start-end.sh?" << std::endl;
abort();
}
} else {
@@ -264,7 +264,7 @@ private:
*/
size_t CalcSuffix(const ChartHypothesis &hypo, int featureID, Phrase &ret, size_t size) const
{
- assert(m_contextPrefix.GetSize() <= m_numTargetTerminals);
+ CHECK(m_contextPrefix.GetSize() <= m_numTargetTerminals);
// special handling for small hypotheses
// does the prefix match the entire hypothesis string? -> just copy prefix
@@ -310,8 +310,8 @@ private:
public:
LanguageModelChartState(const ChartHypothesis &hypo, int featureID, size_t order)
:m_lmRightContext(NULL)
- ,m_contextPrefix(Output, order - 1)
- ,m_contextSuffix(Output, order - 1)
+ ,m_contextPrefix(order - 1)
+ ,m_contextSuffix( order - 1)
,m_hypo(hypo)
{
m_numTargetTerminals = hypo.GetCurrTargetPhrase().GetNumTerminals();
@@ -407,7 +407,7 @@ FFState* LanguageModelImplementation::EvaluateChart(const ChartHypothesis& hypo,
// beginning of sentence symbol <s>? -> just update state
if (word == GetSentenceStartArray())
{
- assert(phrasePos == 0);
+ CHECK(phrasePos == 0);
delete lmState;
lmState = NewState( GetBeginSentenceState() );
}
diff --git a/moses/src/LM/Jamfile b/moses/src/LM/Jamfile
new file mode 100644
index 000000000..2f3cde8ef
--- /dev/null
+++ b/moses/src/LM/Jamfile
@@ -0,0 +1,84 @@
+import option path build-system ;
+
+local dependencies = ;
+
+local with-irstlm = [ option.get "with-irstlm" ] ;
+if $(with-irstlm) {
+ lib irstlm : : <search>$(with-irstlm)/lib ;
+ obj IRST.o : IRST.cpp ..//headers : <include>$(with-irstlm)/include ;
+ alias irst : IRST.o irstlm : : : <define>LM_IRST ;
+ dependencies += irst ;
+ echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" ;
+ echo "!!! You are linking the IRSTLM library; be sure the release is >= 5.70.02 !!!" ;
+ echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" ;
+}
+
+local with-srilm = [ option.get "with-srilm" ] ;
+if $(with-srilm) {
+ if [ option.get "with-srilm-dynamic" : no : yes ] = yes {
+ alias sri-libs : srilm ;
+ } else {
+ sri-arch = [ option.get "with-srilm-arch" ] ;
+ sri-arch ?= [ _shell $(with-srilm)/sbin/machine-type ] ;
+ sri-lib = <search>$(with-srilm)/lib/$(sri-arch) <search>$(with-srilm)/flm/obj/$(sri-arch) ;
+
+ lib flm : : $(sri-lib) ;
+ lib misc : flm : $(sri-lib) ;
+ lib dstruct : misc flm : $(sri-lib) ;
+ lib oolm : dstruct misc flm : $(sri-lib) ;
+
+ alias sri-libs : oolm dstruct misc flm ;
+ }
+
+ obj SRI.o : SRI.cpp ..//headers : <include>$(with-srilm)/include <warnings>off ;
+ obj ParallelBackoff.o : ParallelBackoff.cpp ..//headers : <include>$(with-srilm)/include <warnings>off ;
+ alias sri : SRI.o ParallelBackoff.o sri-libs : : : <define>LM_SRI ;
+ dependencies += sri ;
+}
+
+local with-randlm = [ option.get "with-randlm" ] ;
+if $(with-randlm) {
+ lib randlm : : <search>$(with-randlm)/lib ;
+ obj Rand.o : Rand.cpp randlm ..//headers : <include>$(with-randlm)/include ;
+ alias rand : Rand.o : : : <define>LM_RAND ;
+ dependencies += rand ;
+}
+
+obj Factory.o : Factory.cpp ..//headers $(dependencies) ;
+
+lib LM : Base.cpp Factory.o Implementation.cpp Joint.cpp Ken.cpp MultiFactor.cpp Remote.cpp SingleFactor.cpp
+ ../../../lm//kenlm ..//headers $(dependencies) ;
+
+#Huge kludge to force building if different --with options are passed.
+#Could have used features like <srilm>on but getting these to apply only to linking was ugly and it still didn't trigger an install (since the install path doesn't encode features).
+path-constant LM-LOG : bin/lm.log ;
+#Is there no other way to read a file with bjam?
+local previous = none ;
+if [ path.exists $(LM-LOG) ] {
+ previous = [ _shell "cat $(LM-LOG)" ] ;
+}
+current = "" ;
+for local i in srilm irstlm randlm {
+ local optval = [ option.get "with-$(i)" ] ;
+ if $(optval) {
+ current = "$(current) --with-$(i)=$(optval)" ;
+ }
+}
+
+if $(current) != $(previous) {
+ #Write inconsistent while the build is running
+ if [ path.exists $(LM-LOG) ] {
+ local ignored = @($(LM-LOG):E=inconsistent) ;
+ }
+ #Write $(current) to $(LM-LOG) after the build completes.
+ rule post-build ( ok ? ) {
+ if $(ok) {
+ local ignored = @($(LM-LOG):E=$(current)) ;
+ }
+ }
+ IMPORT $(__name__) : post-build : : $(__name__).post-build ;
+ build-system.set-post-build-hook $(__name__).post-build ;
+
+ always Factory.o ;
+ always LM ;
+}
diff --git a/moses/src/LM/Ken.cpp b/moses/src/LM/Ken.cpp
index cf19f3376..c5307ffc0 100644
--- a/moses/src/LM/Ken.cpp
+++ b/moses/src/LM/Ken.cpp
@@ -19,7 +19,6 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
#include <cstring>
#include <iostream>
#include <memory>
@@ -197,7 +196,7 @@ template <class Model> void LanguageModelKen<Model>::CalcScore(const Phrase &phr
} else {
lm::WordIndex index = TranslateID(word);
if (index == m_ngram->GetVocabulary().BeginSentence()) {
- std::cerr << "Your data contains <s> in a position other than the first word." << std::endl;
+ std::cerr << "Either your data contains <s> in a position other than the first word or your language model is missing <s>. Did you build your ARPA using IRSTLM and forget to run add-start-end.sh?" << std::endl;
abort();
}
float score = TransformLMScore(m_ngram->Score(*state0, index, *state1));
diff --git a/moses/src/LM/ORLM.cpp b/moses/src/LM/ORLM.cpp
index 4fb365ff8..30f3ada55 100644
--- a/moses/src/LM/ORLM.cpp
+++ b/moses/src/LM/ORLM.cpp
@@ -1,4 +1,3 @@
-#include <cassert>
#include <limits>
#include <iostream>
#include <fstream>
@@ -22,7 +21,6 @@ bool LanguageModelORLM::Load(const std::string &filePath, FactorType factorType,
m_lm = new OnlineRLM<T>(&fLmIn, m_nGramOrder);
fLmIn.close();
//m_lm = new MultiOnlineRLM<T>(m_filePath, m_nGramOrder);
- assert(m_lm != NULL);
// get special word ids
m_oov_id = m_lm->vocab_->GetWordID("<unk>");
CreateFactors();
diff --git a/moses/src/LM/ParallelBackoff.cpp b/moses/src/LM/ParallelBackoff.cpp
index 70638d465..ec2fb2f78 100644
--- a/moses/src/LM/ParallelBackoff.cpp
+++ b/moses/src/LM/ParallelBackoff.cpp
@@ -20,21 +20,71 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
#include "LM/ParallelBackoff.h"
-#include "File.h"
+
+#include <vector>
+#include <string>
+#include <sstream>
+#include <fstream>
+
+#include "LM/MultiFactor.h"
+#include "Word.h"
+#include "Factor.h"
+#include "FactorTypeSet.h"
+#include "FactorCollection.h"
+#include "Phrase.h"
#include "TypeDef.h"
#include "Util.h"
+
#include "FNgramSpecs.h"
#include "FNgramStats.h"
#include "FactoredVocab.h"
#include "FNgram.h"
#include "wmatrix.h"
#include "Vocab.h"
+#include "File.h"
using namespace std;
namespace Moses
{
+namespace
+{
+class LanguageModelParallelBackoff : public LanguageModelMultiFactor
+{
+private:
+ std::vector<FactorType> m_factorTypesOrdered;
+
+ FactoredVocab *m_srilmVocab;
+ FNgram *m_srilmModel;
+ VocabIndex m_unknownId;
+ VocabIndex m_wtid;
+ VocabIndex m_wtbid;
+ VocabIndex m_wteid;
+ FNgramSpecs<FNgramCount>* fnSpecs;
+ //std::vector<VocabIndex> m_lmIdLookup;
+ std::map<size_t, VocabIndex>* lmIdMap;
+ std::fstream* debugStream;
+
+ WidMatrix *widMatrix;
+
+public:
+ ~LanguageModelParallelBackoff();
+
+ bool Load(const std::string &filePath, const std::vector<FactorType> &factorTypes, size_t nGramOrder);
+
+ VocabIndex GetLmID( const std::string &str ) const;
+
+ VocabIndex GetLmID( const Factor *factor, FactorType ft ) const;
+
+ void CreateFactors();
+
+ LMResult GetValueForgotState(const std::vector<const Word*> &contextFactor, FFState &outState) const;
+ const FFState *GetNullContextState() const;
+ const FFState *GetBeginSentenceState() const;
+ FFState *NewState(const FFState *from) const;
+};
+
LanguageModelParallelBackoff::~LanguageModelParallelBackoff()
{
///
@@ -67,7 +117,6 @@ bool LanguageModelParallelBackoff::Load(const std::string &filePath, const std::
cerr << "Factored stats\n";
FNgram* fngramLM = new FNgram(*m_srilmVocab,*fnSpecs);
- assert(fngramLM != 0);
cerr << "FNgram object created\n";
@@ -295,5 +344,12 @@ const FFState *LanguageModelParallelBackoff::GetBeginSentenceState() const
{
return NULL;
}
+
+}
+
+LanguageModelMultiFactor *NewParallelBackoff() {
+ return new LanguageModelParallelBackoff();
+}
+
}
diff --git a/moses/src/LM/ParallelBackoff.h b/moses/src/LM/ParallelBackoff.h
index fc39a8dcf..8e4241395 100644
--- a/moses/src/LM/ParallelBackoff.h
+++ b/moses/src/LM/ParallelBackoff.h
@@ -21,72 +21,11 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#pragma once
-#include <vector>
-#include <string>
-#include <sstream>
-#include <fstream>
-
-#include "LM/MultiFactor.h"
-#include "Word.h"
-#include "Factor.h"
-#include "FactorTypeSet.h"
-#include "FactorCollection.h"
-#include "Phrase.h"
-
-#include "FNgramStats.h"
-#include "FactoredVocab.h"
-#include "FNgram.h"
-#include "wmatrix.h"
-#include "Vocab.h"
-
-using namespace std;
-
-//class FactoredVocab;
-//class FNgram;
-//class WidMatrix;
-
-
namespace Moses
{
-/** LM of multiple factors. A simple extension of single factor LM - factors backoff together.
- * Rather slow as this uses string concatenation/split
-*/
-class LanguageModelParallelBackoff : public LanguageModelMultiFactor
-{
-private:
- std::vector<FactorType> m_factorTypesOrdered;
-
- FactoredVocab *m_srilmVocab;
- FNgram *m_srilmModel;
- VocabIndex m_unknownId;
- VocabIndex m_wtid;
- VocabIndex m_wtbid;
- VocabIndex m_wteid;
- FNgramSpecs<FNgramCount>* fnSpecs;
- //std::vector<VocabIndex> m_lmIdLookup;
- std::map<size_t, VocabIndex>* lmIdMap;
- std::fstream* debugStream;
-
- WidMatrix *widMatrix;
-
-public:
- LanguageModelParallelBackoff(){}
-
- ~LanguageModelParallelBackoff();
- bool Load(const std::string &filePath, const std::vector<FactorType> &factorTypes, size_t nGramOrder);
-
- VocabIndex GetLmID( const std::string &str ) const;
-
- VocabIndex GetLmID( const Factor *factor, FactorType ft ) const;
-
- void CreateFactors();
-
- LMResult GetValueForgotState(const std::vector<const Word*> &contextFactor, FFState &outState) const;
- const FFState *GetNullContextState() const;
- const FFState *GetBeginSentenceState() const;
- FFState *NewState(const FFState *from) const;
+class LanguageModelMultiFactor;
-};
+LanguageModelMultiFactor *NewParallelBackoff();
}
diff --git a/moses/src/LM/RandLM.cpp b/moses/src/LM/Rand.cpp
index de03ea58d..93411b8d3 100644
--- a/moses/src/LM/RandLM.cpp
+++ b/moses/src/LM/Rand.cpp
@@ -17,21 +17,62 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include <limits>
#include <iostream>
#include <fstream>
+#include <string>
+#include <vector>
+#include "Factor.h"
+#include "Util.h"
+#include "LM/SingleFactor.h"
+#include "RandLM.h"
-#include "LM/RandLM.h"
+
+#include "LM/Rand.h"
#include "FactorCollection.h"
#include "Phrase.h"
#include "InputFileStream.h"
#include "StaticData.h"
+
namespace Moses
{
+namespace
+{
using namespace std;
+class LanguageModelRandLM : public LanguageModelPointerState
+{
+public:
+ LanguageModelRandLM()
+ : m_lm(0) {}
+ bool Load(const std::string &filePath, FactorType factorType, size_t nGramOrder);
+ virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
+ ~LanguageModelRandLM() {
+ delete m_lm;
+ }
+ void CleanUpAfterSentenceProcessing() {
+ m_lm->clearCaches(); // clear caches
+ }
+ void InitializeBeforeSentenceProcessing() {
+ m_lm->initThreadSpecificData(); // Creates thread specific data iff
+ // compiled with multithreading.
+ }
+protected:
+ std::vector<randlm::WordID> m_randlm_ids_vec;
+ randlm::RandLM* m_lm;
+ randlm::WordID m_oov_id;
+ void CreateFactors(FactorCollection &factorCollection);
+ randlm::WordID GetLmID( const std::string &str ) const;
+ randlm::WordID GetLmID( const Factor *factor ) const {
+ size_t factorId = factor->GetId();
+ return ( factorId >= m_randlm_ids_vec.size()) ? m_oov_id : m_randlm_ids_vec[factorId];
+ };
+
+};
+
+
bool LanguageModelRandLM::Load(const std::string &filePath, FactorType factorType,
size_t nGramOrder)
{
@@ -42,7 +83,7 @@ bool LanguageModelRandLM::Load(const std::string &filePath, FactorType factorTyp
m_nGramOrder = nGramOrder;
int cache_MB = 50; // increase cache size
m_lm = randlm::RandLM::initRandLM(filePath, nGramOrder, cache_MB);
- assert(m_lm != NULL);
+ CHECK(m_lm != NULL);
// get special word ids
m_oov_id = m_lm->getWordID(m_lm->getOOV());
CreateFactors(factorCollection);
@@ -115,4 +156,9 @@ LMResult LanguageModelRandLM::GetValue(const vector<const Word*> &contextFactor,
}
+LanguageModelPointerState *NewRandLM() {
+ return new LanguageModelRandLM();
+}
+
+}
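
The ParallelBackoff and RandLM changes above follow one refactoring pattern: the wrapper class moves out of the public header into an unnamed namespace in its .cpp, and only a factory function (NewParallelBackoff, NewRandLM) is exported, so Factory.cpp can be compiled without the third-party SRILM and RandLM headers. A self-contained illustration of the pattern with generic names (not Moses code):

#include <iostream>

struct LanguageModelBase {            // stands in for LanguageModelPointerState
  virtual ~LanguageModelBase() {}
  virtual const char* Name() const = 0;
};

namespace {                           // implementation hidden from other translation units
class ConcreteLM : public LanguageModelBase {
 public:
  const char* Name() const { return "concrete LM"; }
};
}  // namespace

LanguageModelBase* NewConcreteLM() {  // the only symbol the header needs to declare
  return new ConcreteLM();
}

int main() {
  LanguageModelBase* lm = NewConcreteLM();
  std::cout << lm->Name() << std::endl;
  delete lm;
  return 0;
}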
diff --git a/moses/src/LM/Rand.h b/moses/src/LM/Rand.h
new file mode 100644
index 000000000..c4ef99947
--- /dev/null
+++ b/moses/src/LM/Rand.h
@@ -0,0 +1,30 @@
+/***********************************************************************
+Moses - factored phrase-based language decoder
+Copyright (C) 2006 University of Edinburgh
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#ifndef moses_LM_Rand_h
+#define moses_LM_Rand_h
+
+namespace Moses
+{
+class LanguageModelPointerState;
+LanguageModelPointerState *NewRandLM();
+
+}
+
+#endif
diff --git a/moses/src/LM/RandLM.h b/moses/src/LM/RandLM.h
deleted file mode 100644
index 13aefb311..000000000
--- a/moses/src/LM/RandLM.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/***********************************************************************
-Moses - factored phrase-based language decoder
-Copyright (C) 2006 University of Edinburgh
-
-This library is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation; either
-version 2.1 of the License, or (at your option) any later version.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public
-License along with this library; if not, write to the Free Software
-Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-***********************************************************************/
-
-#ifndef moses_LanguageModelRandLM_h
-#define moses_LanguageModelRandLM_h
-
-#include <string>
-#include <vector>
-#include "Factor.h"
-#include "Util.h"
-#include "LM/SingleFactor.h"
-#include "RandLM.h"
-
-class randlm::RandLM;
-
-namespace Moses
-{
-class Factor;
-class Phrase;
-
-// RandLM wrapper (single factor LM)
-
-class LanguageModelRandLM : public LanguageModelPointerState
-{
-public:
- LanguageModelRandLM() : m_lm(0) {}
- bool Load(const std::string &filePath, FactorType factorType, size_t nGramOrder);
- virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
- ~LanguageModelRandLM() {
- delete m_lm;
- }
- void CleanUpAfterSentenceProcessing() {
- m_lm->clearCaches(); // clear caches
- }
- void InitializeBeforeSentenceProcessing() {
- m_lm->initThreadSpecificData(); // Creates thread specific data iff
- // compiled with multithreading.
- }
-protected:
- std::vector<randlm::WordID> m_randlm_ids_vec;
- randlm::RandLM* m_lm;
- randlm::WordID m_oov_id;
- void CreateFactors(FactorCollection &factorCollection);
- randlm::WordID GetLmID( const std::string &str ) const;
- randlm::WordID GetLmID( const Factor *factor ) const {
- size_t factorId = factor->GetId();
- return ( factorId >= m_randlm_ids_vec.size()) ? m_oov_id : m_randlm_ids_vec[factorId];
- };
-
-};
-
-}
-
-#endif
diff --git a/moses/src/LM/SRI.cpp b/moses/src/LM/SRI.cpp
index 0c93af279..9c5a9c3d4 100644
--- a/moses/src/LM/SRI.cpp
+++ b/moses/src/LM/SRI.cpp
@@ -19,7 +19,7 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include <limits>
#include <iostream>
#include <fstream>
@@ -31,6 +31,9 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "Phrase.h"
#include "StaticData.h"
+#include "Vocab.h"
+#include "Ngram.h"
+
using namespace std;
namespace Moses
@@ -149,7 +152,7 @@ LMResult LanguageModelSRI::GetValue(const vector<const Word*> &contextFactor, St
}
ngram[count] = Vocab_None;
- assert((*contextFactor[count-1])[factorType] != NULL);
+ CHECK((*contextFactor[count-1])[factorType] != NULL);
// call sri lm fn
VocabIndex lmId = GetLmID((*contextFactor[count-1])[factorType]);
ret = GetValue(lmId, ngram+1);
diff --git a/moses/src/LM/SRI.h b/moses/src/LM/SRI.h
index 19932b9c9..69d55f117 100644
--- a/moses/src/LM/SRI.h
+++ b/moses/src/LM/SRI.h
@@ -26,12 +26,12 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include <vector>
#include "Factor.h"
#include "TypeDef.h"
-#include "Vocab.h"
-#include "Ngram.h"
#include "LM/SingleFactor.h"
class Factor;
class Phrase;
+class Vocab;
+class Ngram;
namespace Moses
{
@@ -39,15 +39,15 @@ namespace Moses
class LanguageModelSRI : public LanguageModelPointerState
{
protected:
- std::vector<VocabIndex> m_lmIdLookup;
+ std::vector<unsigned int> m_lmIdLookup;
::Vocab *m_srilmVocab;
Ngram *m_srilmModel;
- VocabIndex m_unknownId;
+ unsigned int m_unknownId;
- LMResult GetValue(VocabIndex wordId, VocabIndex *context) const;
+ LMResult GetValue(unsigned int wordId, unsigned int *context) const;
void CreateFactors();
- VocabIndex GetLmID( const std::string &str ) const;
- VocabIndex GetLmID( const Factor *factor ) const;
+ unsigned int GetLmID( const std::string &str ) const;
+ unsigned int GetLmID( const Factor *factor ) const;
public:
LanguageModelSRI();
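
SRI.h above replaces the SRILM includes with forward declarations and swaps the SRILM typedef VocabIndex for plain unsigned int, so only SRI.cpp needs SRILM's headers on its include path. The general trick in a minimal standalone form (illustrative names):

// Public header: forward declarations suffice because the class only stores
// pointers to the vendor types.
class Vocab;
class Ngram;

class SriWrapper {
 public:
  SriWrapper(Vocab* vocab, Ngram* model) : vocab_(vocab), model_(model) {}
 private:
  Vocab* vocab_;   // incomplete types are fine as pointer or reference members
  Ngram* model_;
};

// Implementation file: #include the real SRILM headers and use the full types.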
diff --git a/moses/src/LM/SingleFactor.cpp b/moses/src/LM/SingleFactor.cpp
index 10f76ac00..73c9e6e83 100644
--- a/moses/src/LM/SingleFactor.cpp
+++ b/moses/src/LM/SingleFactor.cpp
@@ -19,7 +19,6 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
#include <limits>
#include <iostream>
#include <sstream>
diff --git a/moses/src/LVoc.h b/moses/src/LVoc.h
index 3380c4b62..ec20fe3cd 100644
--- a/moses/src/LVoc.h
+++ b/moses/src/LVoc.h
@@ -37,11 +37,11 @@ public:
std::pair<typename M::iterator,bool> p
=m.insert(std::make_pair(k,data.size()));
if(p.second) data.push_back(k);
- assert(static_cast<size_t>(p.first->second)<data.size());
+ CHECK(static_cast<size_t>(p.first->second)<data.size());
return p.first->second;
}
Key const& symbol(LabelId i) const {
- assert(static_cast<size_t>(i)<data.size());
+ CHECK(static_cast<size_t>(i)<data.size());
return data[i];
}
diff --git a/moses/src/LexicalReordering.cpp b/moses/src/LexicalReordering.cpp
index 91affd7d8..b32b96b0d 100644
--- a/moses/src/LexicalReordering.cpp
+++ b/moses/src/LexicalReordering.cpp
@@ -72,7 +72,7 @@ LexicalReordering::~LexicalReordering()
Scores LexicalReordering::GetProb(const Phrase& f, const Phrase& e) const
{
- return m_table->GetScore(f, e, Phrase(Output, ARRAY_SIZE_INCR));
+ return m_table->GetScore(f, e, Phrase(ARRAY_SIZE_INCR));
}
FFState* LexicalReordering::Evaluate(const Hypothesis& hypo,
diff --git a/moses/src/LexicalReordering.h b/moses/src/LexicalReordering.h
index 71c225062..e979b5bf5 100644
--- a/moses/src/LexicalReordering.h
+++ b/moses/src/LexicalReordering.h
@@ -51,7 +51,7 @@ public:
virtual FFState* EvaluateChart(const ChartHypothesis&,
int /* featureID */,
ScoreComponentCollection*) const {
- assert(0); // not valid for chart decoder
+ CHECK(0); // not valid for chart decoder
return NULL;
}
diff --git a/moses/src/LexicalReorderingState.cpp b/moses/src/LexicalReorderingState.cpp
index fb1de0e81..346ac745b 100644
--- a/moses/src/LexicalReorderingState.cpp
+++ b/moses/src/LexicalReorderingState.cpp
@@ -1,7 +1,7 @@
#include <vector>
#include <string>
-#include <cassert>
+#include "util/check.hh"
#include "FFState.h"
#include "Hypothesis.h"
@@ -126,7 +126,7 @@ LexicalReorderingState *LexicalReorderingConfiguration::CreateLexicalReorderingS
void LexicalReorderingState::CopyScores(Scores& scores, const TranslationOption &topt, ReorderingType reoType) const
{
// don't call this on a bidirectional object
- assert(m_direction == LexicalReorderingConfiguration::Backward || m_direction == LexicalReorderingConfiguration::Forward);
+ CHECK(m_direction == LexicalReorderingConfiguration::Backward || m_direction == LexicalReorderingConfiguration::Forward);
const Scores *cachedScores = (m_direction == LexicalReorderingConfiguration::Backward) ?
topt.GetCachedScores(m_configuration.GetScoreProducer()) : m_prevScore;
@@ -188,7 +188,7 @@ int PhraseBasedReorderingState::Compare(const FFState& o) const
return 0;
const PhraseBasedReorderingState* other = dynamic_cast<const PhraseBasedReorderingState*>(&o);
- assert(other != NULL);
+ CHECK(other != NULL);
if (m_prevRange == other->m_prevRange) {
if (m_direction == LexicalReorderingConfiguration::Forward) {
return ComparePrevScores(other->m_prevScore);
@@ -405,7 +405,7 @@ int HierarchicalReorderingForwardState::Compare(const FFState& o) const
return 0;
const HierarchicalReorderingForwardState* other = dynamic_cast<const HierarchicalReorderingForwardState*>(&o);
- assert(other != NULL);
+ CHECK(other != NULL);
if (m_prevRange == other->m_prevRange) {
return ComparePrevScores(other->m_prevScore);
} else if (m_prevRange < other->m_prevRange) {
diff --git a/moses/src/LexicalReorderingTable.cpp b/moses/src/LexicalReorderingTable.cpp
index a4735d64d..00be0a495 100644
--- a/moses/src/LexicalReorderingTable.cpp
+++ b/moses/src/LexicalReorderingTable.cpp
@@ -258,7 +258,7 @@ Scores LexicalReorderingTableTree::GetScore(const Phrase& f, const Phrase& e, co
}
if(m_FactorsC.empty()) {
- assert(1 == cands.size());
+ CHECK(1 == cands.size());
return cands[0].GetScore(0);
} else {
score = auxFindScoreForContext(cands, c);
@@ -273,7 +273,7 @@ Scores LexicalReorderingTableTree::GetScore(const Phrase& f, const Phrase& e, co
Scores LexicalReorderingTableTree::auxFindScoreForContext(const Candidates& cands, const Phrase& context)
{
if(m_FactorsC.empty()) {
- assert(cands.size() <= 1);
+ CHECK(cands.size() <= 1);
return (1 == cands.size())?(cands[0].GetScore(0)):(Scores());
} else {
std::vector<std::string> cvec;
@@ -380,7 +380,7 @@ bool LexicalReorderingTableTree::Create(std::istream& inFile,
}
} else {
// sanity check: ALL lines must have the same number of tokens
- assert(numTokens == tokens.size());
+ CHECK(numTokens == tokens.size());
}
int phrase = 0;
for(; phrase < numKeyTokens; ++phrase) {
@@ -424,7 +424,7 @@ bool LexicalReorderingTableTree::Create(std::istream& inFile,
if(currKey.empty()) {
currKey = key;
//insert key into tree
- assert(psa);
+ CHECK(psa);
PSA::Data& d = psa->insert(key);
if(d == InvalidOffT) {
d = fTell(ot);
@@ -454,7 +454,7 @@ bool LexicalReorderingTableTree::Create(std::istream& inFile,
currFirstWord = key[0];
}
//c) insert key into tree
- assert(psa);
+ CHECK(psa);
PSA::Data& d = psa->insert(key);
if(d == InvalidOffT) {
d = fTell(ot);
@@ -571,8 +571,8 @@ void LexicalReorderingTableTree::auxCacheForSrcPhrase(const Phrase& f)
if(m_FactorsE.empty()) {
//f is all of key...
Candidates cands;
- m_Table->GetCandidates(MakeTableKey(f,Phrase(Output, ARRAY_SIZE_INCR)),&cands);
- m_Cache[MakeCacheKey(f,Phrase(Output, ARRAY_SIZE_INCR))] = cands;
+ m_Table->GetCandidates(MakeTableKey(f,Phrase(ARRAY_SIZE_INCR)),&cands);
+ m_Cache[MakeCacheKey(f,Phrase(ARRAY_SIZE_INCR))] = cands;
} else {
ObjectPool<PPimp> pool;
PPimp* pPos = m_Table->GetRoot();
diff --git a/moses/src/Makefile.am b/moses/src/Makefile.am
deleted file mode 100644
index d99451f3d..000000000
--- a/moses/src/Makefile.am
+++ /dev/null
@@ -1,357 +0,0 @@
-lib_LTLIBRARIES = libmoses.la
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES $(BOOST_CPPFLAGS)
-
-libmoses_ladir = ${includedir}
-
-libmoses_la_HEADERS = \
- AlignmentInfo.h \
- AlignmentInfoCollection.h \
- BilingualDynSuffixArray.h \
- BitmapContainer.h \
- BleuScoreFeature.h \
- CellCollection.h \
- ChartCell.h \
- ChartCellCollection.h \
- ChartHypothesis.h \
- ChartHypothesisCollection.h \
- ChartManager.h \
- ChartRuleLookupManager.h \
- ChartRuleLookupManagerMemory.h \
- ChartRuleLookupManagerOnDisk.h \
- ChartTranslationOption.h \
- ChartTranslationOptionCollection.h \
- ChartTranslationOptionList.h \
- ChartTrellisDetour.h \
- ChartTrellisDetourQueue.h \
- ChartTrellisNode.h \
- ChartTrellisPath.h \
- ChartTrellisPathList.h \
- ConfusionNet.h \
- DecodeFeature.h \
- DecodeGraph.h \
- DecodeStep.h \
- DecodeStepGeneration.h \
- DecodeStepTranslation.h \
- Dictionary.h \
- DotChart.h \
- DotChartInMemory.h \
- DotChartOnDisk.h \
- DummyScoreProducers.h \
- DynSAInclude/file.h \
- DynSAInclude/vocab.h \
- DynSuffixArray.h \
- FFState.h \
- Factor.h \
- FactorCollection.h \
- FactorTypeSet.h \
- FeatureFunction.h \
- FeatureVector.h \
- File.h \
- FilePtr.h \
- FloydWarshall.h \
- GenerationDictionary.h \
- GlobalLexicalModel.h \
- gzfilebuf.h \
- hash.h \
- Hypothesis.h \
- HypothesisStack.h \
- HypothesisStackCubePruning.h \
- HypothesisStackNormal.h \
- InputFileStream.h \
- InputType.h \
- LMList.h \
- LVoc.h \
- LM/Base.h \
- LM/Joint.h \
- LM/Factory.h \
- LM/Implementation.h \
- LM/MultiFactor.h \
- LM/Remote.h \
- LM/SingleFactor.h \
- LM/Ken.h \
- LexicalReordering.h \
- LexicalReorderingState.h \
- LexicalReorderingTable.h \
- Manager.h \
- NonTerminal.h \
- ObjectPool.h \
- OnlineCommand.h \
- PCNTools.h \
- PDTAimp.h \
- Parameter.h \
- PartialTranslOptColl.h \
- Phrase.h \
- PhraseBoundaryFeature.h \
- PhraseDictionary.h \
- PhraseDictionaryALSuffixArray.h \
- PhraseDictionaryDynSuffixArray.h \
- PhraseDictionaryMemory.h \
- PhraseDictionarySCFG.h \
- PhraseDictionaryNode.h \
- PhraseDictionaryNodeSCFG.h \
- PhraseDictionaryOnDisk.h \
- PhraseDictionaryTree.h \
- PhraseDictionaryTreeAdaptor.h \
- PhraseLengthFeature.h \
- PhrasePairFeature.h \
- PrefixTree.h \
- PrefixTreeMap.h \
- ReorderingConstraint.h \
- ReorderingStack.h \
- RuleCube.h \
- RuleCubeItem.h \
- RuleCubeQueue.h \
- RuleTableLoader.h \
- RuleTableLoaderCompact.h \
- RuleTableLoaderFactory.h \
- RuleTableLoaderHiero.h \
- RuleTableLoaderStandard.h \
- ScoreComponentCollection.h \
- ScoreProducer.h \
- Search.h \
- SearchCubePruning.h \
- SearchNormal.h \
- Sentence.h \
- SentenceStats.h \
- SourceWordDeletionFeature.h \
- SparsePhraseDictionaryFeature.h \
- SquareMatrix.h \
- StaticData.h \
- TargetBigramFeature.h \
- TargetNgramFeature.h \
- TargetPhrase.h \
- TargetPhraseCollection.h \
- TargetWordInsertionFeature.h \
- ThreadPool.h \
- Timer.h \
- TranslationOption.h \
- TranslationOptionCollection.h \
- TranslationOptionCollectionConfusionNet.h \
- TranslationOptionCollectionText.h \
- TranslationOptionList.h \
- TranslationSystem.h \
- TreeInput.h \
- TrellisPath.h \
- TrellisPathCollection.h \
- TrellisPathList.h \
- TypeDef.h \
- UniqueObject.h \
- UserMessage.h \
- Util.h \
- Word.h \
- WordLattice.h \
- WordTranslationFeature.h \
- WordsBitmap.h \
- WordsRange.h \
- XmlOption.h
-
-if PROTOBUF
-libmoses_la_HEADERS += rule.pb.h hypergraph.pb.h
-endif
-
-if SRI_LM
-libmoses_la_HEADERS += LM/SRI.h \
- LM/ParallelBackoff.h
-endif
-
-if IRST_LM
-libmoses_la_HEADERS += LM/IRST.h
-endif
-
-if RAND_LM
-libmoses_la_HEADERS += LM/RandLM.h
-endif
-
-if ORLM_LM
-libmoses_la_HEADERS += LM/ORLM.h \
- DynSAInclude/params.h \
- DynSAInclude/hash.h \
- DynSAInclude/quantizer.h \
- DynSAInclude/RandLMFilter.h \
- DynSAInclude/RandLMCache.h
-endif
-
-if SYN_LM
-libmoses_la_HEADERS += SyntacticLanguageModel.h
-endif
-
-libmoses_la_SOURCES = \
- AlignmentInfo.cpp \
- AlignmentInfoCollection.cpp \
- BilingualDynSuffixArray.cpp \
- BitmapContainer.cpp \
- BleuScoreFeature.cpp \
- ChartCell.cpp \
- ChartCellCollection.cpp \
- ChartHypothesis.cpp \
- ChartHypothesisCollection.cpp \
- ChartManager.cpp \
- ChartRuleLookupManager.cpp \
- ChartRuleLookupManagerMemory.cpp \
- ChartRuleLookupManagerOnDisk.cpp \
- ChartTranslationOption.cpp \
- ChartTranslationOptionCollection.cpp \
- ChartTranslationOptionList.cpp \
- ChartTrellisDetour.cpp \
- ChartTrellisDetourQueue.cpp \
- ChartTrellisNode.cpp \
- ChartTrellisPath.cpp \
- ConfusionNet.cpp \
- DecodeFeature.cpp \
- DecodeGraph.cpp \
- DecodeStep.cpp \
- DecodeStepGeneration.cpp \
- DecodeStepTranslation.cpp \
- Dictionary.cpp \
- DotChart.cpp \
- DotChartInMemory.cpp \
- DotChartOnDisk.cpp \
- DummyScoreProducers.cpp \
- DynSAInclude/file.cpp \
- DynSAInclude/vocab.cpp \
- DynSuffixArray.cpp \
- FFState.cpp \
- Factor.cpp \
- FactorCollection.cpp \
- FactorTypeSet.cpp \
- FeatureFunction.cpp \
- FeatureVector.cpp \
- FloydWarshall.cpp \
- GenerationDictionary.cpp \
- GlobalLexicalModel.cpp \
- hash.cpp \
- Hypothesis.cpp \
- HypothesisStack.cpp \
- HypothesisStackCubePruning.cpp \
- HypothesisStackNormal.cpp \
- InputFileStream.cpp \
- InputType.cpp \
- LMList.cpp \
- LVoc.cpp \
- LM/Base.cpp \
- LM/Factory.cpp \
- LM/Implementation.cpp \
- LM/Joint.cpp \
- LM/Ken.cpp \
- LM/MultiFactor.cpp \
- LM/Remote.cpp \
- LM/SingleFactor.cpp \
- LexicalReordering.cpp \
- LexicalReorderingState.cpp \
- LexicalReorderingTable.cpp \
- Manager.cpp \
- OnlineCommand.cpp \
- PCNTools.cpp \
- Parameter.cpp \
- PartialTranslOptColl.cpp \
- Phrase.cpp \
- PhraseBoundaryFeature.cpp \
- PhraseDictionary.cpp \
- PhraseDictionaryALSuffixArray.cpp \
- PhraseDictionaryDynSuffixArray.cpp \
- PhraseDictionaryHiero.cpp \
- PhraseDictionaryMemory.cpp \
- PhraseDictionarySCFG.cpp \
- PhraseDictionaryNode.cpp \
- PhraseDictionaryNodeSCFG.cpp \
- PhraseDictionaryOnDisk.cpp \
- PhraseDictionaryTree.cpp \
- PhraseDictionaryTreeAdaptor.cpp \
- PhraseLengthFeature.cpp \
- PhrasePairFeature.cpp \
- PrefixTreeMap.cpp \
- ReorderingConstraint.cpp \
- ReorderingStack.cpp \
- RuleCube.cpp \
- RuleCubeItem.cpp \
- RuleCubeQueue.cpp \
- RuleTableLoaderCompact.cpp \
- RuleTableLoaderFactory.cpp \
- RuleTableLoaderHiero.cpp \
- RuleTableLoaderStandard.cpp \
- ScoreComponentCollection.cpp \
- ScoreProducer.cpp \
- Search.cpp \
- SearchCubePruning.cpp \
- SearchNormal.cpp \
- Sentence.cpp \
- SentenceStats.cpp \
- SourceWordDeletionFeature.cpp \
- SparsePhraseDictionaryFeature.cpp \
- SquareMatrix.cpp \
- StaticData.cpp \
- TargetBigramFeature.cpp \
- TargetNgramFeature.cpp \
- TargetPhrase.cpp \
- TargetPhraseCollection.cpp \
- TargetWordInsertionFeature.cpp \
- ThreadPool.cpp \
- Timer.cpp \
- TranslationOption.cpp \
- TranslationOptionCollection.cpp \
- TranslationOptionCollectionConfusionNet.cpp \
- TranslationOptionCollectionText.cpp \
- TranslationOptionList.cpp \
- TranslationSystem.cpp \
- TreeInput.cpp \
- TrellisPath.cpp \
- TrellisPathCollection.cpp \
- UserMessage.cpp \
- Util.cpp \
- Word.cpp \
- WordLattice.cpp \
- WordTranslationFeature.cpp \
- WordsBitmap.cpp \
- WordsRange.cpp \
- XmlOption.cpp
-
-if PROTOBUF
-BUILT_SOURCES = \
- rule.pb.h \
- rule.pb.cc \
- hypergraph.pb.h \
- hypergraph.pb.cc
-
-CLEANFILES = $(BUILT_SOURCES)
-SUFFIXES = .proto
-
-rule.pb.cc: rule.proto
- @PROTOC@ --cpp_out=. $<
-rule.pb.h: rule.proto
- @PROTOC@ --cpp_out=. $<
-
-hypergraph.pb.cc: hypergraph.proto
- @PROTOC@ --cpp_out=. $<
-hypergraph.pb.h: hypergraph.proto
- @PROTOC@ --cpp_out=. $<
-
-libmoses_la_SOURCES += rule.pb.cc hypergraph.pb.cc
-
-endif
-
-if SRI_LM
-libmoses_la_SOURCES += LM/SRI.cpp \
- LM/ParallelBackoff.cpp
-
-endif
-
-if IRST_LM
-libmoses_la_SOURCES += LM/IRST.cpp
-endif
-
-if RAND_LM
-libmoses_la_SOURCES += LM/RandLM.cpp
-endif
-
-if ORLM_LM
-libmoses_la_SOURCES += LM/ORLM.cpp \
- DynSAInclude/onlineRLM.h \
- DynSAInclude/perfecthash.h \
- DynSAInclude/params.cpp
-endif
-
-if SYN_LM
-libmoses_la_SOURCES += SyntacticLanguageModel.cpp
-endif
-
-libmoses_la_LIBADD = $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(BOOST_THREAD_LIBS)
diff --git a/moses/src/Manager.cpp b/moses/src/Manager.cpp
index c9fa34307..8d4f306e6 100644
--- a/moses/src/Manager.cpp
+++ b/moses/src/Manager.cpp
@@ -232,7 +232,7 @@ void Manager::CalcNBest(size_t count, TrellisPathList &ret,bool onlyDistinct) co
for (size_t iteration = 0 ; (onlyDistinct ? distinctHyps.size() : ret.GetSize()) < count && contenders.GetSize() > 0 && (iteration < count * nBestFactor) ; iteration++) {
// get next best from list of contenders
TrellisPath *path = contenders.pop();
- assert(path);
+ CHECK(path);
// create deviations from current best
path->CreateDeviantPaths(contenders);
if(onlyDistinct) {
@@ -304,11 +304,11 @@ void Manager::CalcLatticeSamples(size_t count, TrellisPathList &ret) const {
//forward from current
if (i->forward >= 0) {
map<int,const Hypothesis*>::const_iterator idToHypIter = idToHyp.find(i->forward);
- assert(idToHypIter != idToHyp.end());
+ CHECK(idToHypIter != idToHyp.end());
const Hypothesis* nextHypo = idToHypIter->second;
outgoingHyps[hypo].insert(nextHypo);
map<int,float>::const_iterator fscoreIter = fscores.find(nextHypo->GetId());
- assert(fscoreIter != fscores.end());
+ CHECK(fscoreIter != fscores.end());
edgeScores[Edge(hypo->GetId(),nextHypo->GetId())] =
i->fscore - fscoreIter->second;
}
@@ -325,15 +325,15 @@ void Manager::CalcLatticeSamples(size_t count, TrellisPathList &ret) const {
map<const Hypothesis*, set<const Hypothesis*> >::const_iterator outIter =
outgoingHyps.find(i->hypo);
- assert(outIter != outgoingHyps.end());
+ CHECK(outIter != outgoingHyps.end());
float sigma = 0;
for (set<const Hypothesis*>::const_iterator j = outIter->second.begin();
j != outIter->second.end(); ++j) {
map<const Hypothesis*, float>::const_iterator succIter = sigmas.find(*j);
- assert(succIter != sigmas.end());
+ CHECK(succIter != sigmas.end());
map<Edge,float>::const_iterator edgeScoreIter =
edgeScores.find(Edge(i->hypo->GetId(),(*j)->GetId()));
- assert(edgeScoreIter != edgeScores.end());
+ CHECK(edgeScoreIter != edgeScores.end());
float term = edgeScoreIter->second + succIter->second; // Add sigma(*j)
if (sigma == 0) {
sigma = term;
@@ -347,7 +347,7 @@ void Manager::CalcLatticeSamples(size_t count, TrellisPathList &ret) const {
//The actual sampling!
const Hypothesis* startHypo = searchGraph.back().hypo;
- assert(startHypo->GetId() == 0);
+ CHECK(startHypo->GetId() == 0);
for (size_t i = 0; i < count; ++i) {
vector<const Hypothesis*> path;
path.push_back(startHypo);
@@ -365,9 +365,9 @@ void Manager::CalcLatticeSamples(size_t count, TrellisPathList &ret) const {
for (set<const Hypothesis*>::const_iterator j = outIter->second.begin();
j != outIter->second.end(); ++j) {
candidates.push_back(*j);
- assert(sigmas.find(*j) != sigmas.end());
+ CHECK(sigmas.find(*j) != sigmas.end());
Edge edge(path.back()->GetId(),(*j)->GetId());
- assert(edgeScores.find(edge) != edgeScores.end());
+ CHECK(edgeScores.find(edge) != edgeScores.end());
candidateScores.push_back(sigmas[*j] + edgeScores[edge]);
if (scoreTotal == 0) {
scoreTotal = candidateScores.back();
@@ -874,7 +874,7 @@ void Manager::SerializeSearchGraphPB(
ArcList::const_iterator iterArcList;
for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
const Hypothesis *loserHypo = *iterArcList;
- assert(connected[loserHypo->GetId()]);
+ CHECK(connected[loserHypo->GetId()]);
Hypergraph_Edge* edge = hg.add_edges();
SerializeEdgeInfo(loserHypo, edge);
edge->set_head_node(headNodeIdx);
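The Manager.cpp hunk above, like most of the hunks that follow, swaps assert(...) for CHECK(...) from util/check.hh. That header is not part of this diff, so the snippet below is only a hypothetical stand-in (MY_CHECK) for the behaviour the change relies on: the condition keeps being evaluated and a failure aborts even in builds compiled with -DNDEBUG, where a plain assert would be compiled away. Several later hunks use CHECK(false) in exactly that spirit, as an unconditional "unreachable / not implemented" abort.

// Hypothetical stand-in; NOT the actual contents of util/check.hh.
#include <cstdio>
#include <cstdlib>

#define MY_CHECK(Condition) \
  do { \
    if (!(Condition)) { \
      std::fprintf(stderr, "Check failed: %s at %s:%d\n", \
                   #Condition, __FILE__, __LINE__); \
      std::abort(); \
    } \
  } while (0)

int main() {
  MY_CHECK(1 + 1 == 2);   // passes silently, with or without NDEBUG
  // MY_CHECK(false);     // would print the failed condition and abort
  return 0;
}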
diff --git a/moses/src/PDTAimp.h b/moses/src/PDTAimp.h
index ca540d285..5ded31408 100644
--- a/moses/src/PDTAimp.h
+++ b/moses/src/PDTAimp.h
@@ -101,7 +101,7 @@ public:
}
void CleanUp() {
- assert(m_dict);
+ CHECK(m_dict);
m_dict->FreeMemory();
for(size_t i=0; i<m_tgtColls.size(); ++i) delete m_tgtColls[i];
m_tgtColls.clear();
@@ -110,25 +110,10 @@ public:
uniqSrcPhr.clear();
}
- void AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase) {
- std::cerr << "AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase)" << std::endl;
- assert(GetTargetPhraseCollection(source)==0);
-
- VERBOSE(2, "adding unk source phrase "<<source<<"\n");
- std::pair<MapSrc2Tgt::iterator,bool> p
- =m_cache.insert(std::make_pair(source,static_cast<TargetPhraseCollection const*>(0)));
- if(p.second || p.first->second==0) {
- TargetPhraseCollection *ptr=new TargetPhraseCollection;
- ptr->Add(new TargetPhrase(targetPhrase));
- p.first->second=ptr;
- m_tgtColls.push_back(ptr);
- } else VERBOSE(2, "WARNING: you added an already existing phrase!\n");
- }
-
TargetPhraseCollection const*
GetTargetPhraseCollection(Phrase const &src) const {
- assert(m_dict);
+ CHECK(m_dict);
if(src.GetSize()==0) return 0;
std::pair<MapSrc2Tgt::iterator,bool> piter;
@@ -245,11 +230,11 @@ public:
std::vector<float> scores;
Phrase src;
- State() : range(0,0),scores(0),src(Input, ARRAY_SIZE_INCR) {}
+ State() : range(0,0),scores(0),src(ARRAY_SIZE_INCR) {}
State(Position b,Position e,const PPtr& v,const std::vector<float>& sv=std::vector<float>(0))
- : ptr(v),range(b,e),scores(sv),src(Input, ARRAY_SIZE_INCR) {}
+ : ptr(v),range(b,e),scores(sv),src(ARRAY_SIZE_INCR) {}
State(Range const& r,const PPtr& v,const std::vector<float>& sv=std::vector<float>(0))
- : ptr(v),range(r),scores(sv),src(Input, ARRAY_SIZE_INCR) {}
+ : ptr(v),range(r),scores(sv),src(ARRAY_SIZE_INCR) {}
Position begin() const {
return range.first;
@@ -292,7 +277,7 @@ public:
for(size_t k=0; k<factorStrings.size(); ++k) {
std::vector<std::string> factors=TokenizeMultiCharSeparator(*factorStrings[k],StaticData::Instance().GetFactorDelimiter());
- assert(factors.size()==m_output.size());
+ CHECK(factors.size()==m_output.size());
Word& w=targetPhrase.AddWord();
for(size_t l=0; l<m_output.size(); ++l) {
w[m_output[l]]= factorCollection.AddFactor(Output, m_output[l], factors[l]);
@@ -338,7 +323,7 @@ public:
};
void CacheSource(ConfusionNet const& src) {
- assert(m_dict);
+ CHECK(m_dict);
const size_t srcSize=src.GetSize();
std::vector<size_t> exploredPaths(srcSize+1,0);
@@ -385,7 +370,7 @@ public:
State curr(stack.back());
stack.pop_back();
- assert(curr.end()<srcSize);
+ CHECK(curr.end()<srcSize);
const ConfusionNet::Column &currCol=src[curr.end()];
// in a given column, loop over all possibilities
for(size_t colidx=0; colidx<currCol.size(); ++colidx) {
@@ -395,7 +380,7 @@ public:
bool isEpsilon=(s=="" || s==EPSILON);
//assert that we have the right number of link params in this CN option
- assert(currCol[colidx].second.size() >= m_numInputScores);
+ CHECK(currCol[colidx].second.size() >= m_numInputScores);
// do not start with epsilon (except at first position)
if(isEpsilon && curr.begin()==curr.end() && curr.begin()>0) continue;
@@ -456,7 +441,7 @@ public:
//put in phrase table scores, logging as we insert
std::transform(tcands[i].scores.begin(),tcands[i].scores.end(),nscores.begin() + m_numInputScores,TransformScore);
- assert(nscores.size()==m_weights.size());
+ CHECK(nscores.size()==m_weights.size());
//tally up
float score=std::inner_product(nscores.begin(), nscores.end(), m_weights.begin(), 0.0f);
@@ -497,10 +482,10 @@ public:
m_rangeCache.resize(src.GetSize(),vTPC(src.GetSize(),0));
for(std::map<Range,E2Costs>::const_iterator i=cov2cand.begin(); i!=cov2cand.end(); ++i) {
- assert(i->first.first<m_rangeCache.size());
- assert(i->first.second>0);
- assert(static_cast<size_t>(i->first.second-1)<m_rangeCache[i->first.first].size());
- assert(m_rangeCache[i->first.first][i->first.second-1]==0);
+ CHECK(i->first.first<m_rangeCache.size());
+ CHECK(i->first.second>0);
+ CHECK(static_cast<size_t>(i->first.second-1)<m_rangeCache[i->first.first].size());
+ CHECK(m_rangeCache[i->first.first][i->first.second-1]==0);
std::vector<TargetPhrase> tCands;
tCands.reserve(i->second.size());
diff --git a/moses/src/Phrase.cpp b/moses/src/Phrase.cpp
index 4d33023c5..184ee53df 100644
--- a/moses/src/Phrase.cpp
+++ b/moses/src/Phrase.cpp
@@ -20,7 +20,7 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include <algorithm>
#include <sstream>
#include <string>
@@ -36,31 +36,13 @@ using namespace std;
namespace Moses
{
-Phrase::Phrase(const Phrase &copy)
- :m_direction(copy.m_direction)
- ,m_words(copy.m_words)
-{
-}
-
-Phrase& Phrase::operator=(const Phrase& x)
-{
- if(this!=&x) {
-
- m_direction=x.m_direction;
- m_words = x.m_words;
- }
- return *this;
-}
-
-Phrase::Phrase(FactorDirection direction, size_t reserveSize)
- : m_direction(direction)
+Phrase::Phrase(size_t reserveSize)
{
m_words.reserve(reserveSize);
}
-Phrase::Phrase(FactorDirection direction, const vector< const Word* > &mergeWords)
- :m_direction(direction)
+Phrase::Phrase(const vector< const Word* > &mergeWords)
{
m_words.reserve(mergeWords.size());
for (size_t currPos = 0 ; currPos < mergeWords.size() ; currPos++) {
@@ -74,9 +56,9 @@ Phrase::~Phrase()
void Phrase::MergeFactors(const Phrase &copy)
{
- assert(GetSize() == copy.GetSize());
+ CHECK(GetSize() == copy.GetSize());
size_t size = GetSize();
- const size_t maxNumFactors = StaticData::Instance().GetMaxNumFactors(this->GetDirection());
+ const size_t maxNumFactors = MAX_NUM_FACTORS;
for (size_t currPos = 0 ; currPos < size ; currPos++) {
for (unsigned int currFactor = 0 ; currFactor < maxNumFactors ; currFactor++) {
FactorType factorType = static_cast<FactorType>(currFactor);
@@ -89,14 +71,14 @@ void Phrase::MergeFactors(const Phrase &copy)
void Phrase::MergeFactors(const Phrase &copy, FactorType factorType)
{
- assert(GetSize() == copy.GetSize());
+ CHECK(GetSize() == copy.GetSize());
for (size_t currPos = 0 ; currPos < GetSize() ; currPos++)
SetFactor(currPos, factorType, copy.GetFactor(currPos, factorType));
}
void Phrase::MergeFactors(const Phrase &copy, const std::vector<FactorType>& factorVec)
{
- assert(GetSize() == copy.GetSize());
+ CHECK(GetSize() == copy.GetSize());
for (size_t currPos = 0 ; currPos < GetSize() ; currPos++)
for (std::vector<FactorType>::const_iterator i = factorVec.begin();
i != factorVec.end(); ++i) {
@@ -107,7 +89,7 @@ void Phrase::MergeFactors(const Phrase &copy, const std::vector<FactorType>& fac
Phrase Phrase::GetSubString(const WordsRange &wordsRange) const
{
- Phrase retPhrase(m_direction, wordsRange.GetNumWordsCovered());
+ Phrase retPhrase(wordsRange.GetNumWordsCovered());
for (size_t currPos = wordsRange.GetStartPos() ; currPos <= wordsRange.GetEndPos() ; currPos++) {
Word &word = retPhrase.AddWord();
@@ -213,7 +195,7 @@ void Phrase::CreateFromStringNewFormat(FactorDirection direction
isNonTerminal = true;
size_t nextPos = annotatedWord.find("[", 1);
- assert(nextPos != string::npos);
+ CHECK(nextPos != string::npos);
if (direction == Input)
annotatedWord = annotatedWord.substr(1, nextPos - 2);
@@ -230,11 +212,11 @@ void Phrase::CreateFromStringNewFormat(FactorDirection direction
// lhs
string &annotatedWord = annotatedWordVector.back();
- assert(annotatedWord.substr(0, 1) == "[" && annotatedWord.substr(annotatedWord.size()-1, 1) == "]");
+ CHECK(annotatedWord.substr(0, 1) == "[" && annotatedWord.substr(annotatedWord.size()-1, 1) == "]");
annotatedWord = annotatedWord.substr(1, annotatedWord.size() - 2);
lhs.CreateFromString(direction, factorOrder, annotatedWord, true);
- assert(lhs.IsNonTerminal());
+ CHECK(lhs.IsNonTerminal());
}
int Phrase::Compare(const Phrase &other) const
@@ -302,7 +284,7 @@ bool Phrase::IsCompatible(const Phrase &inputPhrase) const
const size_t size = GetSize();
- const size_t maxNumFactors = StaticData::Instance().GetMaxNumFactors(this->GetDirection());
+ const size_t maxNumFactors = MAX_NUM_FACTORS;
for (size_t currPos = 0 ; currPos < size ; currPos++) {
for (unsigned int currFactor = 0 ; currFactor < maxNumFactors ; currFactor++) {
FactorType factorType = static_cast<FactorType>(currFactor);
diff --git a/moses/src/Phrase.h b/moses/src/Phrase.h
index 6c775d66f..9d828a76a 100644
--- a/moses/src/Phrase.h
+++ b/moses/src/Phrase.h
@@ -45,10 +45,6 @@ class Phrase
friend std::ostream& operator<<(std::ostream&, const Phrase&);
private:
- FactorDirection m_direction; /** Reusing Direction enum to really mean which language
- Input = Source, Output = Target.
- Not really used, but nice to know for debugging purposes
- */
std::vector<Word> m_words;
public:
@@ -56,16 +52,11 @@ public:
static void InitializeMemPool();
static void FinalizeMemPool();
- /** copy constructor */
- Phrase(const Phrase &copy);
- Phrase& operator=(const Phrase&);
-
/** create empty phrase
- * \param direction = language (Input = Source, Output = Target)
*/
- Phrase(FactorDirection direction, size_t reserveSize);
+ Phrase(size_t reserveSize);
/** create phrase from vectors of words */
- Phrase(FactorDirection direction, const std::vector< const Word* > &mergeWords);
+ Phrase(const std::vector< const Word* > &mergeWords);
/** destructor */
virtual ~Phrase();
@@ -99,11 +90,6 @@ public:
bool IsCompatible(const Phrase &inputPhrase, FactorType factorType) const;
bool IsCompatible(const Phrase &inputPhrase, const std::vector<FactorType>& factorVec) const;
- //! really means what language. Input = Source, Output = Target
- inline FactorDirection GetDirection() const {
- return m_direction;
- }
-
//! number of words
inline size_t GetSize() const {
return m_words.size();
@@ -150,13 +136,12 @@ public:
void RemoveWord(size_t pos)
{
- assert(pos < m_words.size());
+ CHECK(pos < m_words.size());
m_words.erase(m_words.begin() + pos);
}
//! create new phrase class that is a substring of this phrase
Phrase GetSubString(const WordsRange &wordsRange) const;
- Phrase GetSubString(const WordsRange &wordsRange, FactorType factorType) const;
//! return a string rep of the phrase. Each factor is separated by the factor delimiter as specified in StaticData class
std::string GetStringRep(const std::vector<FactorType> factorsToPrint) const;
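Taken together, the Phrase.cpp and Phrase.h hunks above remove the stored FactorDirection and, with it, the hand-written copy constructor and assignment operator: once std::vector<Word> m_words is the only data member left, the compiler-generated copy operations are already correct, and every construction site later in the patch drops its leading Input/Output argument. A self-contained sketch of both points, using hypothetical stand-in types rather than the Moses classes:

#include <cstddef>
#include <string>
#include <vector>

struct WordSketch { std::string surface; };   // stand-in for Moses::Word

class PhraseSketch {                          // stand-in for Moses::Phrase
public:
  // Old shape: PhraseSketch(FactorDirection direction, size_t reserveSize).
  explicit PhraseSketch(std::size_t reserveSize) { m_words.reserve(reserveSize); }
  // No user-declared copy constructor or operator=: the implicit ones
  // member-wise copy m_words, which is all that is stored now.
private:
  std::vector<WordSketch> m_words;
};

int main() {
  PhraseSketch source(0);       // was PhraseSketch(Input, 0) before this change
  PhraseSketch copy(source);    // implicit copy construction
  PhraseSketch assigned(4);
  assigned = copy;              // implicit copy assignment
  (void)assigned;
  return 0;
}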
diff --git a/moses/src/PhraseDictionary.cpp b/moses/src/PhraseDictionary.cpp
index 42c217dab..5cb64458e 100644
--- a/moses/src/PhraseDictionary.cpp
+++ b/moses/src/PhraseDictionary.cpp
@@ -87,7 +87,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
}
if (staticData.GetInputType() != SentenceInput) {
UserMessage::Add("Must use binary phrase table for this input type");
- assert(false);
+ CHECK(false);
}
PhraseDictionaryMemory* pdm = new PhraseDictionaryMemory(GetNumScoreComponents(),this);
@@ -97,7 +97,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
, m_tableLimit
, system->GetLanguageModels()
, system->GetWeightWordPenalty());
- assert(ret);
+ CHECK(ret);
return pdm;
} else if (m_implementation == Binary) {
PhraseDictionaryTreeAdaptor* pdta = new PhraseDictionaryTreeAdaptor(GetNumScoreComponents(), m_numInputScores,this);
@@ -108,7 +108,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
, m_tableLimit
, system->GetLanguageModels()
, system->GetWeightWordPenalty());
- assert(ret);
+ CHECK(ret);
return pdta;
} else if (m_implementation == SCFG) {
// memory phrase table
@@ -126,7 +126,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
, m_tableLimit
, system->GetLanguageModels()
, system->GetWordPenaltyProducer());
- assert(ret);
+ CHECK(ret);
return pdm;
} else if (m_implementation == Hiero) {
// memory phrase table
@@ -144,7 +144,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
, m_tableLimit
, system->GetLanguageModels()
, system->GetWordPenaltyProducer());
- assert(ret);
+ CHECK(ret);
return pdm;
} else if (m_implementation == ALSuffixArray) {
// memory phrase table
@@ -162,7 +162,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
, m_tableLimit
, system->GetLanguageModels()
, system->GetWordPenaltyProducer());
- assert(ret);
+ CHECK(ret);
return pdm;
} else if (m_implementation == OnDisk) {
@@ -174,7 +174,7 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
, m_tableLimit
, system->GetLanguageModels()
, system->GetWordPenaltyProducer());
- assert(ret);
+ CHECK(ret);
return pdta;
} else if (m_implementation == SuffixArray) {
#ifndef WIN32
@@ -195,11 +195,11 @@ PhraseDictionary* PhraseDictionaryFeature::LoadPhraseTable(const TranslationSyst
std::cerr << "Suffix array phrase table loaded" << std::endl;
return pd;
#else
- assert(false);
+ CHECK(false);
#endif
} else {
std::cerr << "Unknown phrase table type " << m_implementation << endl;
- assert(false);
+ CHECK(false);
}
}
@@ -230,7 +230,7 @@ void PhraseDictionaryFeature::InitDictionary(const TranslationSystem* system, co
}
dict = m_threadUnsafePhraseDictionary.get();
}
- assert(dict);
+ CHECK(dict);
dict->InitializeForInput(source);
}
@@ -242,7 +242,7 @@ const PhraseDictionary* PhraseDictionaryFeature::GetDictionary() const
} else {
dict = m_threadUnsafePhraseDictionary.get();
}
- assert(dict);
+ CHECK(dict);
return dict;
}
diff --git a/moses/src/PhraseDictionary.h b/moses/src/PhraseDictionary.h
index 2362f525e..e8f478079 100644
--- a/moses/src/PhraseDictionary.h
+++ b/moses/src/PhraseDictionary.h
@@ -76,7 +76,6 @@ public:
//! find list of translations that can translates a portion of src. Used by confusion network decoding
virtual const TargetPhraseCollection *GetTargetPhraseCollection(InputType const& src,WordsRange const& range) const;
//! Create entry for translation of source to targetPhrase
- virtual void AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase)=0;
virtual void InitializeForInput(InputType const& source) = 0;
//! Create a sentence-specific manager for SCFG rule lookup.
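This hunk drops the pure-virtual AddEquivPhrase from the PhraseDictionary interface; the hunks that follow then delete the now-unreferenced overrides in PhraseDictionaryMemory, PhraseDictionaryDynSuffixArray, PhraseDictionaryOnDisk, PhraseDictionarySCFG and PhraseDictionaryTreeAdaptor. A toy sketch of that pattern, with hypothetical names rather than the Moses classes:

#include <iostream>

struct DictionarySketch {                  // stand-in for the PhraseDictionary base
  virtual ~DictionarySketch() {}
  virtual void InitializeForInput() = 0;   // part of the interface that stays
  // virtual void AddEquivPhrase(...) = 0; // part removed by this patch
};

struct MemoryDictionarySketch : DictionarySketch {
  void InitializeForInput() { std::cout << "init\n"; }
  // With the pure virtual gone, derived classes need no AddEquivPhrase at all.
};

int main() {
  MemoryDictionarySketch d;
  d.InitializeForInput();
  return 0;
}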
diff --git a/moses/src/PhraseDictionaryALSuffixArray.cpp b/moses/src/PhraseDictionaryALSuffixArray.cpp
index 693a4ce66..26a9d2a31 100644
--- a/moses/src/PhraseDictionaryALSuffixArray.cpp
+++ b/moses/src/PhraseDictionaryALSuffixArray.cpp
@@ -58,7 +58,7 @@ void PhraseDictionaryALSuffixArray::InitializeForInput(InputType const& source)
bool ret = loader->Load(*m_input, *m_output, inFile, *m_weight, m_tableLimit,
*m_languageModels, m_wpProducer, *this);
- assert(ret);
+ CHECK(ret);
}
}
diff --git a/moses/src/PhraseDictionaryDynSuffixArray.cpp b/moses/src/PhraseDictionaryDynSuffixArray.cpp
index 72b1fe479..d4f0161db 100644
--- a/moses/src/PhraseDictionaryDynSuffixArray.cpp
+++ b/moses/src/PhraseDictionaryDynSuffixArray.cpp
@@ -40,7 +40,7 @@ bool PhraseDictionaryDynSuffixArray::Load(const std::vector<FactorType>& input,
void PhraseDictionaryDynSuffixArray::InitializeForInput(const InputType& input)
{
- assert(&input == &input);
+ CHECK(&input == &input);
}
void PhraseDictionaryDynSuffixArray::CleanUp()
@@ -81,7 +81,7 @@ void PhraseDictionaryDynSuffixArray::deleteSnt(unsigned /* idx */, unsigned /* n
ChartRuleLookupManager *PhraseDictionaryDynSuffixArray::CreateRuleLookupManager(const InputType&, const ChartCellCollection&)
{
- assert(false);
+ CHECK(false);
return 0;
}
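The CHECK(&input == &input) in the InitializeForInput hunk above is always true; like the assert it replaces, the self-comparison only touches an otherwise unused parameter. Elsewhere in the patch the same intent is expressed by commenting out the parameter name (const InputType& /* input */). A minimal sketch of the more conventional void-cast idiom, shown on a hypothetical free function:

struct InputSketch {};                           // stand-in for Moses::InputType

void InitializeForInputSketch(const InputSketch &input) {
  (void)input;   // explicitly marks the parameter as intentionally unused
}

int main() {
  InputSketch in;
  InitializeForInputSketch(in);
  return 0;
}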
diff --git a/moses/src/PhraseDictionaryDynSuffixArray.h b/moses/src/PhraseDictionaryDynSuffixArray.h
index 1bd16fc34..0d6be08dd 100644
--- a/moses/src/PhraseDictionaryDynSuffixArray.h
+++ b/moses/src/PhraseDictionaryDynSuffixArray.h
@@ -26,7 +26,6 @@ public:
// functions below required by base class
const TargetPhraseCollection* GetTargetPhraseCollection(const Phrase& src) const;
void InitializeForInput(const InputType& i);
- void AddEquivPhrase(const Phrase &, const TargetPhrase &) {}
void CleanUp();
void insertSnt(string&, string&, string&);
void deleteSnt(unsigned, unsigned);
diff --git a/moses/src/PhraseDictionaryMemory.cpp b/moses/src/PhraseDictionaryMemory.cpp
index 8e653326e..1ee449172 100644
--- a/moses/src/PhraseDictionaryMemory.cpp
+++ b/moses/src/PhraseDictionaryMemory.cpp
@@ -76,7 +76,7 @@ bool PhraseDictionaryMemory::Load(const std::vector<FactorType> &input
size_t numElement = NOT_FOUND; // 3=old format, 5=async format which include word alignment info
const std::string& factorDelimiter = staticData.GetFactorDelimiter();
- Phrase sourcePhrase(Input, 0);
+ Phrase sourcePhrase(0);
std::vector<float> scv;
scv.reserve(m_numScoreComponent);
@@ -196,12 +196,6 @@ TargetPhraseCollection *PhraseDictionaryMemory::CreateTargetPhraseCollection(con
return currNode->CreateTargetPhraseCollection();
}
-void PhraseDictionaryMemory::AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase)
-{
- TargetPhraseCollection &phraseColl = *CreateTargetPhraseCollection(source);
- phraseColl.Add(new TargetPhrase(targetPhrase));
-}
-
const TargetPhraseCollection *PhraseDictionaryMemory::GetTargetPhraseCollection(const Phrase &source) const
{
// exactly like CreateTargetPhraseCollection, but don't create
diff --git a/moses/src/PhraseDictionaryMemory.h b/moses/src/PhraseDictionaryMemory.h
index a38d9c842..dad7f4a9b 100644
--- a/moses/src/PhraseDictionaryMemory.h
+++ b/moses/src/PhraseDictionaryMemory.h
@@ -57,8 +57,6 @@ public:
const TargetPhraseCollection *GetTargetPhraseCollection(const Phrase &source) const;
- void AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase);
-
// for mert
virtual void InitializeForInput(InputType const&) {
/* Don't do anything source specific here as this object is shared between threads.*/
@@ -67,7 +65,7 @@ public:
virtual ChartRuleLookupManager *CreateRuleLookupManager(
const InputType &,
const ChartCellCollection &) {
- assert(false);
+ CHECK(false);
return 0;
}
diff --git a/moses/src/PhraseDictionaryNodeSCFG.cpp b/moses/src/PhraseDictionaryNodeSCFG.cpp
index 5c5ab3d84..91f5f7acd 100644
--- a/moses/src/PhraseDictionaryNodeSCFG.cpp
+++ b/moses/src/PhraseDictionaryNodeSCFG.cpp
@@ -64,7 +64,7 @@ void PhraseDictionaryNodeSCFG::Sort(size_t tableLimit)
PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetOrCreateChild(const Word &sourceTerm)
{
- //assert(!sourceTerm.IsNonTerminal());
+ //CHECK(!sourceTerm.IsNonTerminal());
std::pair <TerminalMap::iterator,bool> insResult;
insResult = m_sourceTermMap.insert( std::make_pair(sourceTerm, PhraseDictionaryNodeSCFG()) );
@@ -75,8 +75,8 @@ PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetOrCreateChild(const Word
PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetOrCreateChild(const Word &sourceNonTerm, const Word &targetNonTerm)
{
- assert(sourceNonTerm.IsNonTerminal());
- assert(targetNonTerm.IsNonTerminal());
+ CHECK(sourceNonTerm.IsNonTerminal());
+ CHECK(targetNonTerm.IsNonTerminal());
NonTerminalMapKey key(sourceNonTerm, targetNonTerm);
std::pair <NonTerminalMap::iterator,bool> insResult;
@@ -88,7 +88,7 @@ PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetOrCreateChild(const Word
const PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetChild(const Word &sourceTerm) const
{
- assert(!sourceTerm.IsNonTerminal());
+ CHECK(!sourceTerm.IsNonTerminal());
TerminalMap::const_iterator p = m_sourceTermMap.find(sourceTerm);
return (p == m_sourceTermMap.end()) ? NULL : &p->second;
@@ -96,8 +96,8 @@ const PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetChild(const Word &s
const PhraseDictionaryNodeSCFG *PhraseDictionaryNodeSCFG::GetChild(const Word &sourceNonTerm, const Word &targetNonTerm) const
{
- assert(sourceNonTerm.IsNonTerminal());
- assert(targetNonTerm.IsNonTerminal());
+ CHECK(sourceNonTerm.IsNonTerminal());
+ CHECK(targetNonTerm.IsNonTerminal());
NonTerminalMapKey key(sourceNonTerm, targetNonTerm);
NonTerminalMap::const_iterator p = m_nonTermMap.find(key);
diff --git a/moses/src/PhraseDictionaryOnDisk.cpp b/moses/src/PhraseDictionaryOnDisk.cpp
index 410ee1ba1..04c29c449 100644
--- a/moses/src/PhraseDictionaryOnDisk.cpp
+++ b/moses/src/PhraseDictionaryOnDisk.cpp
@@ -58,10 +58,10 @@ bool PhraseDictionaryOnDisk::Load(const std::vector<FactorType> &input
if (!m_dbWrapper.BeginLoad(filePath))
return false;
- assert(m_dbWrapper.GetMisc("Version") == 3);
- assert(m_dbWrapper.GetMisc("NumSourceFactors") == input.size());
- assert(m_dbWrapper.GetMisc("NumTargetFactors") == output.size());
- assert(m_dbWrapper.GetMisc("NumScores") == weight.size());
+ CHECK(m_dbWrapper.GetMisc("Version") == 3);
+ CHECK(m_dbWrapper.GetMisc("NumSourceFactors") == input.size());
+ CHECK(m_dbWrapper.GetMisc("NumTargetFactors") == output.size());
+ CHECK(m_dbWrapper.GetMisc("NumScores") == weight.size());
return true;
}
@@ -71,21 +71,10 @@ bool PhraseDictionaryOnDisk::Load(const std::vector<FactorType> &input
//! find list of translations that can translates src. Only for phrase input
const TargetPhraseCollection *PhraseDictionaryOnDisk::GetTargetPhraseCollection(const Phrase& /* src */) const
{
- assert(false);
+ CHECK(false);
return NULL;
}
-void PhraseDictionaryOnDisk::AddEquivPhrase(const Phrase& /* source */, const TargetPhrase& /*targetPhrase */)
-{
- assert(false); // TODO
-}
-
-
-//! Create entry for translation of source to targetPhrase
-void PhraseDictionaryOnDisk::AddEquivPhrase(const Phrase& /* source */, TargetPhrase * /* targetPhrase */)
-{
-}
-
void PhraseDictionaryOnDisk::InitializeForInput(const InputType& /* input */)
{
// Nothing to do: sentence-specific state is stored in ChartRuleLookupManager
diff --git a/moses/src/PhraseDictionaryOnDisk.h b/moses/src/PhraseDictionaryOnDisk.h
index 44192dfdf..0a8dfc808 100644
--- a/moses/src/PhraseDictionaryOnDisk.h
+++ b/moses/src/PhraseDictionaryOnDisk.h
@@ -25,9 +25,9 @@
#include <vector>
#include <string>
#include "PhraseDictionary.h"
-#include "../../OnDiskPt/src/OnDiskWrapper.h"
-#include "../../OnDiskPt/src/Word.h"
-#include "../../OnDiskPt/src/PhraseNode.h"
+#include "../../OnDiskPt/OnDiskWrapper.h"
+#include "../../OnDiskPt/Word.h"
+#include "../../OnDiskPt/PhraseNode.h"
namespace Moses
{
@@ -76,11 +76,6 @@ public:
//! find list of translations that can translates src. Only for phrase input
virtual const TargetPhraseCollection *GetTargetPhraseCollection(const Phrase& src) const;
- void AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase);
-
- //! Create entry for translation of source to targetPhrase
- virtual void AddEquivPhrase(const Phrase &source, TargetPhrase *targetPhrase);
-
void InitializeForInput(const InputType& input);
void CleanUp();
diff --git a/moses/src/PhraseDictionarySCFG.cpp b/moses/src/PhraseDictionarySCFG.cpp
index becd86155..c3a243990 100644
--- a/moses/src/PhraseDictionarySCFG.cpp
+++ b/moses/src/PhraseDictionarySCFG.cpp
@@ -89,8 +89,8 @@ PhraseDictionaryNodeSCFG &PhraseDictionarySCFG::GetOrCreateNode(const Phrase &so
// indexed by source label 1st
const Word &sourceNonTerm = word;
- assert(iterAlign != target.GetAlignmentInfo().end());
- assert(iterAlign->first == pos);
+ CHECK(iterAlign != target.GetAlignmentInfo().end());
+ CHECK(iterAlign->first == pos);
size_t targetNonTermInd = iterAlign->second;
++iterAlign;
const Word &targetNonTerm = target.GetWord(targetNonTermInd);
@@ -100,12 +100,12 @@ PhraseDictionaryNodeSCFG &PhraseDictionarySCFG::GetOrCreateNode(const Phrase &so
currNode = currNode->GetOrCreateChild(word);
}
- assert(currNode != NULL);
+ CHECK(currNode != NULL);
}
// finally, the source LHS
//currNode = currNode->GetOrCreateChild(sourceLHS);
- //assert(currNode != NULL);
+ //CHECK(currNode != NULL);
return *currNode;
diff --git a/moses/src/PhraseDictionarySCFG.h b/moses/src/PhraseDictionarySCFG.h
index bfc476cc4..e338dac50 100644
--- a/moses/src/PhraseDictionarySCFG.h
+++ b/moses/src/PhraseDictionarySCFG.h
@@ -56,16 +56,10 @@ class PhraseDictionarySCFG : public PhraseDictionary
// Required by PhraseDictionary.
const TargetPhraseCollection *GetTargetPhraseCollection(const Phrase &) const
{
- assert(false);
+ CHECK(false);
return NULL;
}
- // Required by PhraseDictionary.
- void AddEquivPhrase(const Phrase &, const TargetPhrase &)
- {
- assert(false);
- }
-
void InitializeForInput(const InputType& i);
void CleanUp();
diff --git a/moses/src/PhraseDictionaryTree.cpp b/moses/src/PhraseDictionaryTree.cpp
index 8f1cfdfe9..f7c779e28 100644
--- a/moses/src/PhraseDictionaryTree.cpp
+++ b/moses/src/PhraseDictionaryTree.cpp
@@ -3,7 +3,7 @@
#include "FeatureVector.h"
#include "PhraseDictionaryTree.h"
#include <map>
-#include <cassert>
+#include "util/check.hh"
#include <sstream>
#include <iostream>
#include <fstream>
@@ -227,7 +227,7 @@ public:
if(f.empty()) return;
if(f[0]>=data.size()) return;
if(!data[f[0]]) return;
- assert(data[f[0]]->findKey(f[0])<data[f[0]]->size());
+ CHECK(data[f[0]]->findKey(f[0])<data[f[0]]->size());
OFF_T tCandOffset=data[f[0]]->find(f);
if(tCandOffset==InvalidOffT) return;
fSeek(ot,tCandOffset);
@@ -239,7 +239,7 @@ public:
typedef PhraseDictionaryTree::PrefixPtr PPtr;
void GetTargetCandidates(PPtr p,TgtCands& tgtCands) {
- assert(p);
+ CHECK(p);
if(p.imp->isRoot()) return;
OFF_T tCandOffset=p.imp->ptr()->getData(p.imp->idx);
if(tCandOffset==InvalidOffT) return;
@@ -276,7 +276,7 @@ public:
}
PPtr Extend(PPtr p,const std::string& w) {
- assert(p);
+ CHECK(p);
if(w.empty() || w==EPSILON) return p;
LabelId wi=sv->index(w);
@@ -285,7 +285,7 @@ public:
else if(p.imp->isRoot()) {
if(wi<data.size() && data[wi]) {
const void* ptr = data[wi]->findKeyPtr(wi);
- assert(ptr);
+ CHECK(ptr);
return PPtr(pPool.get(PPimp(data[wi],data[wi]->findKey(wi),0)));
}
} else if(PTF const* nextP=p.imp->ptr()->getPtr(p.imp->idx)) {
@@ -510,7 +510,7 @@ int PhraseDictionaryTree::Create(std::istream& inFile,const std::string& out)
if (numElement == NOT_FOUND) {
// init numElement
numElement = tokens.size();
- assert(numElement >= 3);
+ CHECK(numElement >= 3);
}
if (tokens.size() != numElement) {
@@ -555,7 +555,7 @@ int PhraseDictionaryTree::Create(std::istream& inFile,const std::string& out)
++count;
currF=f;
// insert src phrase in prefix tree
- assert(psa);
+ CHECK(psa);
PSA::Data& d=psa->insert(f);
if(d==InvalidOffT) d=fTell(ot);
else {
@@ -608,7 +608,7 @@ int PhraseDictionaryTree::Create(std::istream& inFile,const std::string& out)
}
// insert src phrase in prefix tree
- assert(psa);
+ CHECK(psa);
PSA::Data& d=psa->insert(f);
if(d==InvalidOffT) d=fTell(ot);
else {
@@ -618,7 +618,7 @@ int PhraseDictionaryTree::Create(std::istream& inFile,const std::string& out)
}
}
tgtCands.push_back(TgtCand(e,sc, alignmentString));
- assert(currFirstWord!=InvalidLabelId);
+ CHECK(currFirstWord!=InvalidLabelId);
tgtCands.back().SetFeatures(fnames, fvalues);
}
if (PrintWordAlignment())
diff --git a/moses/src/PhraseDictionaryTreeAdaptor.cpp b/moses/src/PhraseDictionaryTreeAdaptor.cpp
index aa90bde81..cce4a2ee4 100644
--- a/moses/src/PhraseDictionaryTreeAdaptor.cpp
+++ b/moses/src/PhraseDictionaryTreeAdaptor.cpp
@@ -84,11 +84,6 @@ PhraseDictionaryTreeAdaptor::GetTargetPhraseCollection(InputType const& src,Word
}
}
-void PhraseDictionaryTreeAdaptor::
-AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase)
-{
- imp->AddEquivPhrase(source,targetPhrase);
-}
void PhraseDictionaryTreeAdaptor::EnableCache()
{
imp->useCache=1;
diff --git a/moses/src/PhraseDictionaryTreeAdaptor.h b/moses/src/PhraseDictionaryTreeAdaptor.h
index 5e834acb3..c38e234fb 100644
--- a/moses/src/PhraseDictionaryTreeAdaptor.h
+++ b/moses/src/PhraseDictionaryTreeAdaptor.h
@@ -4,7 +4,7 @@
#define moses_PhraseDictionaryTreeAdaptor_h
#include <vector>
-#include <cassert>
+#include "util/check.hh"
#include "TypeDef.h"
#include "PhraseDictionaryMemory.h"
#include "TargetPhraseCollection.h"
@@ -56,10 +56,6 @@ public:
TargetPhraseCollection const* GetTargetPhraseCollection(Phrase const &src) const;
TargetPhraseCollection const* GetTargetPhraseCollection(InputType const& src,WordsRange const & srcRange) const;
-
- // this function can be only used for UNKNOWN source phrases
- void AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase);
-
std::string GetScoreProducerDescription(unsigned idx=0) const;
std::string GetScoreProducerWeightShortName(unsigned idx=0) const;
@@ -69,7 +65,7 @@ public:
virtual ChartRuleLookupManager *CreateRuleLookupManager(
const InputType &,
const ChartCellCollection &) {
- assert(false);
+ CHECK(false);
return 0;
}
};
diff --git a/moses/src/PrefixTree.h b/moses/src/PrefixTree.h
index c6eafb4ce..3215fb834 100644
--- a/moses/src/PrefixTree.h
+++ b/moses/src/PrefixTree.h
@@ -9,7 +9,7 @@
#include <vector>
#include <algorithm>
-#include <cassert>
+#include "util/check.hh"
#include <deque>
#include "Util.h"
#include "FilePtr.h"
diff --git a/moses/src/PrefixTreeMap.cpp b/moses/src/PrefixTreeMap.cpp
index d4527aae4..7e5512a05 100644
--- a/moses/src/PrefixTreeMap.cpp
+++ b/moses/src/PrefixTreeMap.cpp
@@ -1,6 +1,10 @@
#include "PrefixTreeMap.h"
#include "TypeDef.h"
+#ifdef WITH_THREADS
+#include <boost/thread.hpp>
+#endif
+
namespace Moses
{
void GenericCandidate::readBin(FILE* f)
@@ -143,7 +147,7 @@ void PrefixTreeMap::GetCandidates(const IPhrase& key, Candidates* cands)
if(key.empty() || key[0] >= m_Data.size() || !m_Data[key[0]]) {
return;
}
- assert(m_Data[key[0]]->findKey(key[0])<m_Data[key[0]]->size());
+ CHECK(m_Data[key[0]]->findKey(key[0])<m_Data[key[0]]->size());
OFF_T candOffset = m_Data[key[0]]->find(key);
if(candOffset == InvalidOffT) {
@@ -155,7 +159,7 @@ void PrefixTreeMap::GetCandidates(const IPhrase& key, Candidates* cands)
void PrefixTreeMap::GetCandidates(const PPimp& p, Candidates* cands)
{
- assert(p.isValid());
+ CHECK(p.isValid());
if(p.isRoot()) {
return;
};
@@ -169,7 +173,7 @@ void PrefixTreeMap::GetCandidates(const PPimp& p, Candidates* cands)
std::vector< std::string const * > PrefixTreeMap::ConvertPhrase(const IPhrase& p, unsigned int voc) const
{
- assert(voc < m_Voc.size() && m_Voc[voc] != 0);
+ CHECK(voc < m_Voc.size() && m_Voc[voc] != 0);
std::vector< std::string const * > result;
result.reserve(p.size());
for(IPhrase::const_iterator i = p.begin(); i != p.end(); ++i) {
@@ -180,7 +184,7 @@ std::vector< std::string const * > PrefixTreeMap::ConvertPhrase(const IPhrase& p
IPhrase PrefixTreeMap::ConvertPhrase(const std::vector< std::string >& p, unsigned int voc) const
{
- assert(voc < m_Voc.size() && m_Voc[voc] != 0);
+ CHECK(voc < m_Voc.size() && m_Voc[voc] != 0);
IPhrase result;
result.reserve(p.size());
for(size_t i = 0; i < p.size(); ++i) {
@@ -191,13 +195,13 @@ IPhrase PrefixTreeMap::ConvertPhrase(const std::vector< std::string >& p, unsign
LabelId PrefixTreeMap::ConvertWord(const std::string& w, unsigned int voc) const
{
- assert(voc < m_Voc.size() && m_Voc[voc] != 0);
+ CHECK(voc < m_Voc.size() && m_Voc[voc] != 0);
return m_Voc[voc]->index(w);
}
std::string PrefixTreeMap::ConvertWord(LabelId w, unsigned int voc) const
{
- assert(voc < m_Voc.size() && m_Voc[voc] != 0);
+ CHECK(voc < m_Voc.size() && m_Voc[voc] != 0);
if(w == PrefixTreeMap::MagicWord) {
return "|||";
} else if (w == InvalidLabelId) {
@@ -214,7 +218,7 @@ PPimp* PrefixTreeMap::GetRoot()
PPimp* PrefixTreeMap::Extend(PPimp* p, LabelId wi)
{
- assert(p->isValid());
+ CHECK(p->isValid());
if(wi == InvalidLabelId) {
return 0; // unknown word, return invalid pointer
@@ -222,7 +226,7 @@ PPimp* PrefixTreeMap::Extend(PPimp* p, LabelId wi)
} else if(p->isRoot()) {
if(wi < m_Data.size() && m_Data[wi]) {
const void* ptr = m_Data[wi]->findKeyPtr(wi);
- assert(ptr);
+ CHECK(ptr);
return m_PtrPool.get(PPimp(m_Data[wi],m_Data[wi]->findKey(wi),0));
}
} else if(PTF const* nextP = p->ptr()->getPtr(p->idx)) {
diff --git a/moses/src/PrefixTreeMap.h b/moses/src/PrefixTreeMap.h
index faa851037..6bb6b769f 100644
--- a/moses/src/PrefixTreeMap.h
+++ b/moses/src/PrefixTreeMap.h
@@ -6,10 +6,6 @@
#include<iostream>
#include <map>
-#ifdef WITH_THREADS
-#include <boost/thread/mutex.hpp>
-#endif
-
#include "PrefixTree.h"
#include "File.h"
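These two PrefixTreeMap hunks move the Boost threading include out of the header and into the one implementation file that needs it (broadening it from boost/thread/mutex.hpp to boost/thread.hpp along the way), so translation units that merely include PrefixTreeMap.h no longer pull in Boost. A minimal sketch of the same include-hygiene pattern; file names are hypothetical and the Boost include is left commented so the sketch builds without Boost installed:

// ---- widget.h: no threading include needed in the public header ----
#include <string>

class WidgetSketch {
public:
  void Process(const std::string &line);
};

// ---- widget.cpp: the only place that pulls in the heavy header ----
// #ifdef WITH_THREADS
// #include <boost/thread.hpp>
// #endif

void WidgetSketch::Process(const std::string &line) {
  (void)line;   // real code would take a lock here when WITH_THREADS is set
}

int main() {
  WidgetSketch w;
  w.Process("example");
  return 0;
}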
diff --git a/moses/src/RuleCube.h b/moses/src/RuleCube.h
index 2917c195b..5d6aabe46 100644
--- a/moses/src/RuleCube.h
+++ b/moses/src/RuleCube.h
@@ -31,7 +31,7 @@
#include <boost/unordered_set.hpp>
#include <boost/version.hpp>
-#include <cassert>
+#include "util/check.hh"
#include <queue>
#include <set>
#include <vector>
@@ -93,7 +93,7 @@ class RuleCube
~RuleCube();
float GetTopScore() const {
- assert(!m_queue.empty());
+ CHECK(!m_queue.empty());
RuleCubeItem *item = m_queue.top();
return item->GetScore();
}
diff --git a/moses/src/RuleCubeItem.cpp b/moses/src/RuleCubeItem.cpp
index 84c00fc42..fa4b4c51e 100644
--- a/moses/src/RuleCubeItem.cpp
+++ b/moses/src/RuleCubeItem.cpp
@@ -86,7 +86,7 @@ void RuleCubeItem::CreateHypothesis(const ChartTranslationOption &transOpt,
ChartHypothesis *RuleCubeItem::ReleaseHypothesis()
{
- assert(m_hypothesis);
+ CHECK(m_hypothesis);
ChartHypothesis *hypo = m_hypothesis;
m_hypothesis = 0;
return hypo;
@@ -98,7 +98,7 @@ void RuleCubeItem::CreateHypothesisDimensions(
const DottedRule &dottedRule,
const ChartCellCollection &allChartCells)
{
- assert(!dottedRule.IsRoot());
+ CHECK(!dottedRule.IsRoot());
const DottedRule *prev = dottedRule.GetPrev();
if (!prev->IsRoot()) {
@@ -110,12 +110,12 @@ void RuleCubeItem::CreateHypothesisDimensions(
// get a sorted list of the underlying hypotheses
const ChartCellLabel &cellLabel = dottedRule.GetChartCellLabel();
const ChartHypothesisCollection *hypoColl = cellLabel.GetStack();
- assert(hypoColl);
+ CHECK(hypoColl);
const HypoList &hypoList = hypoColl->GetSortedHypotheses();
// there have to be hypothesis with the desired non-terminal
// (otherwise the rule would not be considered)
- assert(!hypoList.empty());
+ CHECK(!hypoList.empty());
// create a list of hypotheses that match the non-terminal
HypothesisDimension dimension(0, hypoList);
diff --git a/moses/src/RuleTableLoaderCompact.cpp b/moses/src/RuleTableLoaderCompact.cpp
index 39d437b80..21d146bec 100644
--- a/moses/src/RuleTableLoaderCompact.cpp
+++ b/moses/src/RuleTableLoaderCompact.cpp
@@ -63,12 +63,12 @@ bool RuleTableLoaderCompact::Load(const std::vector<FactorType> &input,
// Load source phrases.
std::vector<Phrase> sourcePhrases;
std::vector<size_t> sourceLhsIds;
- LoadPhraseSection(reader, Input, vocab, sourcePhrases, sourceLhsIds);
+ LoadPhraseSection(reader, vocab, sourcePhrases, sourceLhsIds);
// Load target phrases.
std::vector<Phrase> targetPhrases;
std::vector<size_t> targetLhsIds;
- LoadPhraseSection(reader, Output, vocab, targetPhrases, targetLhsIds);
+ LoadPhraseSection(reader, vocab, targetPhrases, targetLhsIds);
// Load alignments.
std::vector<const AlignmentInfo *> alignmentSets;
@@ -111,7 +111,6 @@ void RuleTableLoaderCompact::LoadVocabularySection(
void RuleTableLoaderCompact::LoadPhraseSection(
LineReader &reader,
- FactorDirection direction,
const std::vector<Word> &vocab,
std::vector<Phrase> &rhsPhrases,
std::vector<size_t> &lhsIds)
@@ -121,7 +120,7 @@ void RuleTableLoaderCompact::LoadPhraseSection(
const size_t phraseCount = std::atoi(reader.m_line.c_str());
// Reads lines, storing Phrase object for each RHS and vocab ID for each LHS.
- rhsPhrases.resize(phraseCount, Phrase(direction, 0));
+ rhsPhrases.resize(phraseCount, Phrase(0));
lhsIds.resize(phraseCount);
std::vector<size_t> tokenPositions;
for (size_t i = 0; i < phraseCount; ++i) {
diff --git a/moses/src/RuleTableLoaderCompact.h b/moses/src/RuleTableLoaderCompact.h
index 9f960683d..add082291 100644
--- a/moses/src/RuleTableLoaderCompact.h
+++ b/moses/src/RuleTableLoaderCompact.h
@@ -65,7 +65,6 @@ class RuleTableLoaderCompact : public RuleTableLoader
std::vector<Word> &);
void LoadPhraseSection(LineReader &,
- FactorDirection,
const std::vector<Word> &,
std::vector<Phrase> &,
std::vector<size_t> &);
diff --git a/moses/src/RuleTableLoaderStandard.cpp b/moses/src/RuleTableLoaderStandard.cpp
index 8a83bf7b4..190241a13 100644
--- a/moses/src/RuleTableLoaderStandard.cpp
+++ b/moses/src/RuleTableLoaderStandard.cpp
@@ -70,7 +70,7 @@ void ReformatHieroRule(int sourceTarget, string &phrase, map<size_t, pair<size_t
if (tok.substr(0, 1) == "[" && tok.substr(tokLen - 1, 1) == "]")
{ // no-term
vector<string> split = Tokenize(tok, ",");
- assert(split.size() == 2);
+ CHECK(split.size() == 2);
tok = "[X]" + split[0] + "]";
size_t coIndex = Scan<size_t>(split[1]);
@@ -202,7 +202,7 @@ bool RuleTableLoaderStandard::Load(FormatType format
UserMessage::Add(strme.str());
abort();
}
- assert(scoreVector.size() == numScoreComponents);
+ CHECK(scoreVector.size() == numScoreComponents);
// parse source & find pt node
@@ -210,7 +210,7 @@ bool RuleTableLoaderStandard::Load(FormatType format
Word sourceLHS, targetLHS;
// source
- Phrase sourcePhrase(Input, 0);
+ Phrase sourcePhrase( 0);
sourcePhrase.CreateFromStringNewFormat(Input, input, sourcePhraseString, factorDelimiter, sourceLHS);
// create target phrase obj
diff --git a/moses/src/ScoreComponentCollection.h b/moses/src/ScoreComponentCollection.h
index 8a873437e..bb28fd8df 100644
--- a/moses/src/ScoreComponentCollection.h
+++ b/moses/src/ScoreComponentCollection.h
@@ -23,7 +23,6 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#define moses_ScoreComponentCollection_h
#include <numeric>
-#include <cassert>
#include <sstream>
#ifdef MPI_ENABLE
@@ -31,6 +30,8 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include <boost/serialization/split_member.hpp>
#endif
+#include "util/check.hh"
+
#include "LMList.h"
#include "ScoreProducer.h"
#include "FeatureVector.h"
@@ -72,7 +73,7 @@ private:
static IndexPair GetIndexes(const ScoreProducer* sp)
{
ScoreIndexMap::const_iterator indexIter = s_scoreIndexes.find(sp);
- assert(indexIter != s_scoreIndexes.end());
+ CHECK(indexIter != s_scoreIndexes.end());
return indexIter->second;
}
@@ -167,7 +168,7 @@ public:
void PlusEquals(const ScoreProducer* sp, const std::vector<float>& scores)
{
IndexPair indexes = GetIndexes(sp);
- assert(scores.size() == indexes.second - indexes.first);
+ CHECK(scores.size() == indexes.second - indexes.first);
for (size_t i = 0; i < scores.size(); ++i) {
m_scores[i + indexes.first] += scores[i];
}
@@ -179,14 +180,14 @@ public:
void PlusEquals(const ScoreProducer* sp, float score)
{
IndexPair indexes = GetIndexes(sp);
- assert(1 == indexes.second - indexes.first);
+ CHECK(1 == indexes.second - indexes.first);
m_scores[indexes.first] += score;
}
//For features which have an unbounded number of components
void PlusEquals(const ScoreProducer*sp, const std::string& name, float score)
{
- assert(sp->GetNumScoreComponents() == ScoreProducer::unlimited);
+ CHECK(sp->GetNumScoreComponents() == ScoreProducer::unlimited);
FName fname(sp->GetScoreProducerDescription(),name);
m_scores[fname] += score;
}
@@ -194,7 +195,7 @@ public:
void Assign(const ScoreProducer* sp, const std::vector<float>& scores)
{
IndexPair indexes = GetIndexes(sp);
- assert(scores.size() == indexes.second - indexes.first);
+ CHECK(scores.size() == indexes.second - indexes.first);
for (size_t i = 0; i < scores.size(); ++i) {
m_scores[i + indexes.first] = scores[i];
}
@@ -206,16 +207,17 @@ public:
void Assign(const ScoreProducer* sp, float score)
{
IndexPair indexes = GetIndexes(sp);
- assert(1 == indexes.second - indexes.first);
+ CHECK(1 == indexes.second - indexes.first);
m_scores[indexes.first] = score;
}
- //For features which have an unbounded number of components
- void Assign(const ScoreProducer*sp, const std::string name, float score)
- {
- assert(sp->GetNumScoreComponents() == ScoreProducer::unlimited);
- FName fname(sp->GetScoreProducerDescription(),name);
- m_scores[fname] = score;
+ //! Special version PlusEquals(ScoreProducer, vector<float>)
+ //! to add the score from a single ScoreProducer that produces
+ //! a single value
+ void Assign(const ScoreProducer* sp, float score) {
+ CHECK(1 == sp->GetNumScoreComponents());
+ const size_t i = m_sim->GetBeginIndex(sp->GetScoreBookkeepingID());
+ m_scores[i] = score;
}
//Read sparse features from string
@@ -236,7 +238,7 @@ public:
float PartialInnerProduct(const ScoreProducer* sp, const std::vector<float>& rhs) const
{
std::vector<float> lhs = GetScoresForProducer(sp);
- assert(lhs.size() == rhs.size());
+ CHECK(lhs.size() == rhs.size());
return std::inner_product(lhs.begin(), lhs.end(), rhs.begin(), 0.0f);
}
@@ -284,7 +286,7 @@ public:
float GetScoreForProducer(const ScoreProducer* sp) const
{
IndexPair indexes = GetIndexes(sp);
- assert(indexes.second - indexes.first == 1);
+ CHECK(indexes.second - indexes.first == 1);
return m_scores[indexes.first];
}
@@ -292,7 +294,7 @@ public:
float GetScoreForProducer
(const ScoreProducer* sp, const std::string& name) const
{
- assert(sp->GetNumScoreComponents() == ScoreProducer::unlimited);
+ CHECK(sp->GetNumScoreComponents() == ScoreProducer::unlimited);
FName fname(sp->GetScoreProducerDescription(),name);
return m_scores[fname];
}
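The ScoreComponentCollection.h hunks keep the established layout: every ScoreProducer with a fixed number of components owns a contiguous [begin, end) slice of one flat score vector, GetIndexes returns that slice, and the CHECKs guard the slice widths (producers with an unbounded number of components go through named FName entries instead). Note that, as replaced here, the class appears to end up with two Assign(const ScoreProducer*, float) overloads, the second of which refers to m_sim; C++ does not allow that, so this hunk reads like an unresolved merge rather than the intended API. Below is a self-contained sketch of the dense slice-indexing idea only, with hypothetical stand-in types:

#include <cassert>
#include <cstddef>
#include <map>
#include <utility>
#include <vector>

typedef std::pair<std::size_t, std::size_t> IndexPairSketch;   // [begin, end)

class ScoreCollectionSketch {            // stand-in for ScoreComponentCollection
public:
  void Register(const void *producer, std::size_t numScores) {
    const std::size_t begin = m_scores.size();
    m_scores.resize(begin + numScores, 0.0f);
    m_indexes[producer] = IndexPairSketch(begin, begin + numScores);
  }
  void PlusEquals(const void *producer, const std::vector<float> &scores) {
    const IndexPairSketch ix = m_indexes.find(producer)->second;
    assert(scores.size() == ix.second - ix.first);   // same width guard as the hunk
    for (std::size_t i = 0; i < scores.size(); ++i)
      m_scores[ix.first + i] += scores[i];
  }
  float GetSingleScore(const void *producer) const {
    const IndexPairSketch ix = m_indexes.find(producer)->second;
    assert(ix.second - ix.first == 1);               // single-valued producer only
    return m_scores[ix.first];
  }
private:
  std::map<const void *, IndexPairSketch> m_indexes;
  std::vector<float> m_scores;
};

int main() {
  ScoreCollectionSketch coll;
  int lm, wp;                                        // addresses stand in for producers
  coll.Register(&lm, 2);
  coll.Register(&wp, 1);
  coll.PlusEquals(&lm, std::vector<float>(2, 0.5f));
  coll.PlusEquals(&wp, std::vector<float>(1, -1.0f));
  return (coll.GetSingleScore(&wp) < 0.0f) ? 0 : 1;
}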
diff --git a/moses/src/SearchCubePruning.cpp b/moses/src/SearchCubePruning.cpp
index 452e9ff61..4f98208bb 100644
--- a/moses/src/SearchCubePruning.cpp
+++ b/moses/src/SearchCubePruning.cpp
@@ -41,7 +41,7 @@ SearchCubePruning::SearchCubePruning(Manager& manager, const InputType &source,
:Search(manager)
,m_source(source)
,m_hypoStackColl(source.GetSize() + 1)
- ,m_initialTargetPhrase(Output, source.m_initialTargetPhrase)
+ ,m_initialTargetPhrase(source.m_initialTargetPhrase)
,m_start(clock())
,m_transOptColl(transOptColl)
{
diff --git a/moses/src/SearchNormal.cpp b/moses/src/SearchNormal.cpp
index 334a73bcd..9f26270e9 100644
--- a/moses/src/SearchNormal.cpp
+++ b/moses/src/SearchNormal.cpp
@@ -16,7 +16,7 @@ SearchNormal::SearchNormal(Manager& manager, const InputType &source, const Tran
:Search(manager)
,m_source(source)
,m_hypoStackColl(source.GetSize() + 1)
- ,m_initialTargetPhrase(Output, source.m_initialTargetPhrase)
+ ,m_initialTargetPhrase(source.m_initialTargetPhrase)
,m_start(clock())
,interrupted_flag(0)
,m_transOptColl(transOptColl)
diff --git a/moses/src/Sentence.cpp b/moses/src/Sentence.cpp
index 1b53aedf7..87ede7c7a 100644
--- a/moses/src/Sentence.cpp
+++ b/moses/src/Sentence.cpp
@@ -33,11 +33,10 @@ using namespace std;
namespace Moses
{
-Sentence::Sentence(FactorDirection direction)
- : Phrase(direction, 0)
+Sentence::Sentence()
+ : Phrase(0)
, InputType()
{
- assert(direction == Input);
const StaticData& staticData = StaticData::Instance();
if (staticData.GetSearchAlgorithm() == ChartDecoding) {
m_defaultLabelSet.insert(StaticData::Instance().GetInputDefaultNonTerminal());
@@ -176,7 +175,7 @@ Sentence::CreateTranslationOptionCollection(const TranslationSystem* system) con
size_t maxNoTransOptPerCoverage = StaticData::Instance().GetMaxNoTransOptPerCoverage();
float transOptThreshold = StaticData::Instance().GetTranslationOptionThreshold();
TranslationOptionCollection *rv= new TranslationOptionCollectionText(system, *this, maxNoTransOptPerCoverage, transOptThreshold);
- assert(rv);
+ CHECK(rv);
return rv;
}
void Sentence::Print(std::ostream& out) const
diff --git a/moses/src/Sentence.h b/moses/src/Sentence.h
index 20b34c4c7..b1c25f9dd 100644
--- a/moses/src/Sentence.h
+++ b/moses/src/Sentence.h
@@ -60,7 +60,7 @@ private:
public:
- Sentence(FactorDirection direction);
+ Sentence();
InputTypeEnum GetType() const {
return SentenceInput;
diff --git a/moses/src/StaticData.cpp b/moses/src/StaticData.cpp
index 2d4bafb25..d24555f77 100644
--- a/moses/src/StaticData.cpp
+++ b/moses/src/StaticData.cpp
@@ -21,7 +21,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
#include <string>
-#include <cassert>
+#include "util/check.hh"
#include "PhraseDictionaryMemory.h"
#include "DecodeStepTranslation.h"
#include "DecodeStepGeneration.h"
@@ -482,16 +482,16 @@ bool StaticData::LoadData(Parameter *parameter)
if (vecStr.size() == 1) {
sentenceID++;
- Phrase phrase(Output, 0);
+ Phrase phrase(0);
phrase.CreateFromString(GetOutputFactorOrder(), vecStr[0], GetFactorDelimiter());
m_constraints.insert(make_pair(sentenceID,phrase));
} else if (vecStr.size() == 2) {
sentenceID = Scan<long>(vecStr[0]);
- Phrase phrase(Output, 0);
+ Phrase phrase(0);
phrase.CreateFromString(GetOutputFactorOrder(), vecStr[1], GetFactorDelimiter());
m_constraints.insert(make_pair(sentenceID,phrase));
} else {
- assert(false);
+ CHECK(false);
}
}
}
@@ -1057,14 +1057,14 @@ bool StaticData::LoadGenerationTables()
VERBOSE(1, filePath << endl);
m_generationDictionary.push_back(new GenerationDictionary(numFeatures, input,output));
- assert(m_generationDictionary.back() && "could not create GenerationDictionary");
+ CHECK(m_generationDictionary.back() && "could not create GenerationDictionary");
if (!m_generationDictionary.back()->Load(filePath, Output)) {
delete m_generationDictionary.back();
return false;
}
vector<float> gdWeights;
for(size_t i = 0; i < numFeatures; i++) {
- assert(currWeightNum < weight.size());
+ CHECK(currWeightNum < weight.size());
gdWeights.push_back(weight[currWeightNum++]);
}
SetWeights(m_generationDictionary.back(), gdWeights);
@@ -1083,7 +1083,7 @@ bool StaticData::LoadPhraseTables()
VERBOSE(2,"Creating phrase table features" << endl);
// language models must be loaded prior to loading phrase tables
- assert(m_fLMsLoaded);
+ CHECK(m_fLMsLoaded);
// load phrase translation tables
if (m_parameter->GetParam("ttable-file").size() > 0) {
// weights
@@ -1130,7 +1130,7 @@ bool StaticData::LoadPhraseTables()
} else
implementation = (PhraseTableImplementation) Scan<int>(token[0]);
- assert(token.size() >= 5);
+ CHECK(token.size() >= 5);
//characteristics of the phrase table
vector<FactorType> input = Tokenize<FactorType>(token[1], ",")
@@ -1141,7 +1141,7 @@ bool StaticData::LoadPhraseTables()
size_t numScoreComponent = Scan<size_t>(token[3]);
string filePath= token[4];
- assert(weightAll.size() >= weightAllOffset + numScoreComponent);
+ CHECK(weightAll.size() >= weightAllOffset + numScoreComponent);
// weights for this phrase dictionary
// first InputScores (if any), then translation scores
@@ -1205,7 +1205,7 @@ bool StaticData::LoadPhraseTables()
alignmentsFile= token[6];
}
- assert(numScoreComponent==weight.size());
+ CHECK(numScoreComponent==weight.size());
//This is needed for regression testing, but the phrase table
@@ -1282,7 +1282,7 @@ void StaticData::LoadNonTerminals()
string line;
while(getline(inStream, line)) {
vector<string> tokens = Tokenize(line);
- assert(tokens.size() == 2);
+ CHECK(tokens.size() == 2);
UnknownLHSEntry entry(tokens[0], Scan<float>(tokens[1]));
m_unknownLHS.push_back(entry);
}
@@ -1342,7 +1342,7 @@ bool StaticData::LoadDecodeGraphs()
// For specifying multiple translation model
decodeGraphInd = Scan<size_t>(token[0]);
//the vectorList index can only increment by one
- assert(decodeGraphInd == prevDecodeGraphInd || decodeGraphInd == prevDecodeGraphInd + 1);
+ CHECK(decodeGraphInd == prevDecodeGraphInd || decodeGraphInd == prevDecodeGraphInd + 1);
if (decodeGraphInd > prevDecodeGraphInd) {
prev = NULL;
}
@@ -1350,7 +1350,7 @@ bool StaticData::LoadDecodeGraphs()
index = Scan<size_t>(token[2]);
} else {
UserMessage::Add("Malformed mapping!");
- assert(false);
+ CHECK(false);
}
DecodeStep* decodeStep = NULL;
@@ -1361,7 +1361,7 @@ bool StaticData::LoadDecodeGraphs()
strme << "No phrase dictionary with index "
<< index << " available!";
UserMessage::Add(strme.str());
- assert(false);
+ CHECK(false);
}
decodeStep = new DecodeStepTranslation(m_phraseDictionary[index], prev);
break;
@@ -1371,16 +1371,16 @@ bool StaticData::LoadDecodeGraphs()
strme << "No generation dictionary with index "
<< index << " available!";
UserMessage::Add(strme.str());
- assert(false);
+ CHECK(false);
}
decodeStep = new DecodeStepGeneration(m_generationDictionary[index], prev);
break;
case InsertNullFertilityWord:
- assert(!"Please implement NullFertilityInsertion.");
+ CHECK(!"Please implement NullFertilityInsertion.");
break;
}
- assert(decodeStep);
+ CHECK(decodeStep);
if (m_decodeGraphs.size() < decodeGraphInd + 1) {
DecodeGraph *decodeGraph;
if (m_searchAlgorithm == ChartDecoding) {
diff --git a/moses/src/SyntacticLanguageModelFiles.h b/moses/src/SyntacticLanguageModelFiles.h
index 318e22636..c6533c424 100644
--- a/moses/src/SyntacticLanguageModelFiles.h
+++ b/moses/src/SyntacticLanguageModelFiles.h
@@ -37,7 +37,7 @@ template <class MH, class MO>
std::cerr << "Reading syntactic language model files...\n";
// For each model file...
for ( int a=0, n=filePaths.size(); a<n; a++ ) { // read models
- FILE* pf = fopen(filePaths[a].c_str(),"r"); //assert(pf); // Read model file
+ FILE* pf = fopen(filePaths[a].c_str(),"r"); //CHECK(pf); // Read model file
if(!pf){
std::cerr << "Error loading model file " << filePaths[a] << std::endl;
return;
diff --git a/moses/src/TargetPhrase.cpp b/moses/src/TargetPhrase.cpp
index 8aef3e855..74b3f1132 100644
--- a/moses/src/TargetPhrase.cpp
+++ b/moses/src/TargetPhrase.cpp
@@ -19,7 +19,7 @@ License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/
-#include <cassert>
+#include "util/check.hh"
#include <algorithm>
#include <boost/lexical_cast.hpp>
#include "util/tokenize_piece.hh"
@@ -39,8 +39,8 @@ using namespace std;
namespace Moses
{
-TargetPhrase::TargetPhrase(FactorDirection direction, std::string out_string)
- :Phrase(direction, 0),m_transScore(0.0), m_fullScore(0.0), m_sourcePhrase(direction,0)
+TargetPhrase::TargetPhrase(std::string out_string)
+ :Phrase(0),m_transScore(0.0), m_fullScore(0.0), m_sourcePhrase(0)
, m_alignmentInfo(&AlignmentInfoCollection::Instance().GetEmptyAlignmentInfo())
{
@@ -50,8 +50,8 @@ TargetPhrase::TargetPhrase(FactorDirection direction, std::string out_string)
}
-TargetPhrase::TargetPhrase(FactorDirection direction)
- :Phrase(direction, ARRAY_SIZE_INCR)
+TargetPhrase::TargetPhrase()
+ :Phrase(ARRAY_SIZE_INCR)
, m_transScore(0.0)
, m_fullScore(0.0)
,m_sourcePhrase(direction, 0)
@@ -124,7 +124,7 @@ void TargetPhrase::SetScore(const TranslationSystem* system, const Scores &score
vector<float> weights = StaticData::Instance().GetWeights(prod);
//expand the input weight vector
- assert(scoreVector.size() <= prod->GetNumScoreComponents());
+ CHECK(scoreVector.size() <= prod->GetNumScoreComponents());
Scores sizedScoreVector = scoreVector;
sizedScoreVector.resize(prod->GetNumScoreComponents(),0.0f);
@@ -137,7 +137,7 @@ void TargetPhrase::SetScore(const ScoreProducer* translationScoreProducer,
const vector<float> &weightT,
float weightWP, const LMList &languageModels)
{
- assert(weightT.size() == scoreVector.size());
+ CHECK(weightT.size() == scoreVector.size());
// calc average score if non-best
m_transScore = std::inner_product(scoreVector.begin(), scoreVector.end(), weightT.begin(), 0.0f);
@@ -191,7 +191,7 @@ void TargetPhrase::SetScoreChart(const ScoreProducer* translationScoreProducer,
,const WordPenaltyProducer* wpProducer)
{
- assert(weightT.size() == scoreVector.size());
+ CHECK(weightT.size() == scoreVector.size());
// calc average score if non-best
m_transScore = std::inner_product(scoreVector.begin(), scoreVector.end(), weightT.begin(), 0.0f);
@@ -252,7 +252,7 @@ void TargetPhrase::SetScore(const ScoreProducer* producer, const Scores &scoreVe
void TargetPhrase::SetWeights(const ScoreProducer* translationScoreProducer, const vector<float> &weightT)
{
// calling this function in case of confusion net input is undefined
- assert(StaticData::Instance().GetInputType()==SentenceInput);
+ CHECK(StaticData::Instance().GetInputType()==SentenceInput);
/* one way to fix this, you have to make sure the weightT contains (in
addition to the usual phrase translation scaling factors) the input
diff --git a/moses/src/TargetPhrase.h b/moses/src/TargetPhrase.h
index 149cd8ec2..cb14fbfb1 100644
--- a/moses/src/TargetPhrase.h
+++ b/moses/src/TargetPhrase.h
@@ -61,8 +61,8 @@ protected:
Word m_lhsTarget;
public:
- TargetPhrase(FactorDirection direction=Output);
- TargetPhrase(FactorDirection direction, std::string out_string);
+ TargetPhrase();
+ TargetPhrase(std::string out_string);
TargetPhrase(const Phrase &);
~TargetPhrase();
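One more place where the TargetPhrase.cpp hunk above looks only partially merged: the default constructor loses its FactorDirection parameter, yet the untouched initializer line ,m_sourcePhrase(direction, 0) still names direction and still passes two arguments, while the string-taking constructor in the same hunk already uses the new one-argument form m_sourcePhrase(0). A toy reproduction of that mismatch, with hypothetical names:

#include <cstddef>

struct InnerSketch {                     // stand-in for the Phrase-typed member
  explicit InnerSketch(std::size_t n) { (void)n; }
};

struct OuterSketch {                     // stand-in for TargetPhrase
  // OuterSketch() : m_sourcePhrase(direction, 0) {}   // no such parameter any more
  OuterSketch() : m_sourcePhrase(0) {}                 // the form the other ctor uses
  InnerSketch m_sourcePhrase;
};

int main() {
  OuterSketch t;
  (void)t;
  return 0;
}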
diff --git a/moses/src/TranslationOption.cpp b/moses/src/TranslationOption.cpp
index e2e875af3..c823dadb8 100644
--- a/moses/src/TranslationOption.cpp
+++ b/moses/src/TranslationOption.cpp
@@ -88,7 +88,7 @@ TranslationOption::TranslationOption(const WordsRange &wordsRange
TranslationOption::TranslationOption(const TranslationOption &copy)
: m_targetPhrase(copy.m_targetPhrase)
//, m_sourcePhrase(new Phrase(*copy.m_sourcePhrase)) // TODO use when confusion network trans opt for confusion net properly implemented
- , m_sourcePhrase( (copy.m_sourcePhrase == NULL) ? new Phrase(Input, ARRAY_SIZE_INCR) : new Phrase(*copy.m_sourcePhrase))
+ , m_sourcePhrase( (copy.m_sourcePhrase == NULL) ? new Phrase(ARRAY_SIZE_INCR) : new Phrase(*copy.m_sourcePhrase))
, m_sourceWordsRange(copy.m_sourceWordsRange)
, m_futureScore(copy.m_futureScore)
, m_scoreBreakdown(copy.m_scoreBreakdown)
@@ -98,7 +98,7 @@ TranslationOption::TranslationOption(const TranslationOption &copy)
TranslationOption::TranslationOption(const TranslationOption &copy, const WordsRange &sourceWordsRange)
: m_targetPhrase(copy.m_targetPhrase)
//, m_sourcePhrase(new Phrase(*copy.m_sourcePhrase)) // TODO use when confusion network trans opt for confusion net properly implemented
- , m_sourcePhrase( (copy.m_sourcePhrase == NULL) ? new Phrase(Input, ARRAY_SIZE_INCR) : new Phrase(*copy.m_sourcePhrase))
+ , m_sourcePhrase( (copy.m_sourcePhrase == NULL) ? new Phrase(ARRAY_SIZE_INCR) : new Phrase(*copy.m_sourcePhrase))
, m_sourceWordsRange(sourceWordsRange)
, m_futureScore(copy.m_futureScore)
, m_scoreBreakdown(copy.m_scoreBreakdown)
@@ -107,7 +107,7 @@ TranslationOption::TranslationOption(const TranslationOption &copy, const WordsR
void TranslationOption::MergeNewFeatures(const Phrase& phrase, const ScoreComponentCollection& score, const std::vector<FactorType>& featuresToAdd)
{
- assert(phrase.GetSize() == m_targetPhrase.GetSize());
+ CHECK(phrase.GetSize() == m_targetPhrase.GetSize());
if (featuresToAdd.size() == 1) {
m_targetPhrase.MergeFactors(phrase, featuresToAdd[0]);
} else if (featuresToAdd.empty()) {
diff --git a/moses/src/TranslationOptionCollection.cpp b/moses/src/TranslationOptionCollection.cpp
index 826234e2e..d9eefc9d6 100644
--- a/moses/src/TranslationOptionCollection.cpp
+++ b/moses/src/TranslationOptionCollection.cpp
@@ -221,7 +221,7 @@ void TranslationOptionCollection::ProcessOneUnknownWord(const Word &sourceWord,s
// modify the starting bitmap
}
- Phrase* m_unksrc = new Phrase(Input,1);
+ Phrase* m_unksrc = new Phrase(1);
m_unksrc->AddWord() = sourceWord;
m_unksrcs.push_back(m_unksrc);
@@ -572,7 +572,7 @@ void TranslationOptionCollection::CreateXmlOptionsForRange(size_t, size_t)
void TranslationOptionCollection::Add(TranslationOption *translationOption)
{
const WordsRange &coverage = translationOption->GetSourceWordsRange();
- assert(coverage.GetEndPos() - coverage.GetStartPos() < m_collection[coverage.GetStartPos()].size());
+ CHECK(coverage.GetEndPos() - coverage.GetStartPos() < m_collection[coverage.GetStartPos()].size());
m_collection[coverage.GetStartPos()][coverage.GetEndPos() - coverage.GetStartPos()].Add(translationOption);
}
@@ -643,7 +643,7 @@ TranslationOptionList &TranslationOptionCollection::GetTranslationOptionList(siz
size_t maxSizePhrase = StaticData::Instance().GetMaxPhraseLength();
maxSize = std::min(maxSize, maxSizePhrase);
- assert(maxSize < m_collection[startPos].size());
+ CHECK(maxSize < m_collection[startPos].size());
return m_collection[startPos][maxSize];
}
const TranslationOptionList &TranslationOptionCollection::GetTranslationOptionList(size_t startPos, size_t endPos) const
@@ -652,7 +652,7 @@ const TranslationOptionList &TranslationOptionCollection::GetTranslationOptionLi
size_t maxSizePhrase = StaticData::Instance().GetMaxPhraseLength();
maxSize = std::min(maxSize, maxSizePhrase);
- assert(maxSize < m_collection[startPos].size());
+ CHECK(maxSize < m_collection[startPos].size());
return m_collection[startPos][maxSize];
}
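
For readers following the bounds checks above: the translation options appear to be stored per start position and per span length, so the guarded index is endPos - startPos. The standalone sketch below mimics that [startPos][length] layout with invented names and sizes; it is not the Moses TranslationOptionCollection itself.

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  // Toy table indexed as table[startPos][endPos - startPos]; names are invented.
  const std::size_t sentenceLength = 5;
  const std::size_t maxPhraseLength = 3;
  std::vector<std::vector<int> > table(sentenceLength);
  for (std::size_t start = 0; start < sentenceLength; ++start) {
    // A span may not run past the end of the sentence or the phrase-length limit.
    const std::size_t maxSpan = std::min(maxPhraseLength, sentenceLength - start);
    table[start].resize(maxSpan, 0);
  }

  const std::size_t startPos = 1, endPos = 3;        // a span covering words 1..3
  const std::size_t lengthIndex = endPos - startPos; // == 2
  if (lengthIndex < table[startPos].size())          // the CHECK in the hunks above
    table[startPos][lengthIndex] += 1;
  std::cout << table[startPos][lengthIndex] << "\n"; // 1
  return 0;
}
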
diff --git a/moses/src/TranslationOptionList.h b/moses/src/TranslationOptionList.h
index 926f5c13a..d56578029 100644
--- a/moses/src/TranslationOptionList.h
+++ b/moses/src/TranslationOptionList.h
@@ -2,7 +2,7 @@
#define moses_TranslationOptionList_h
#include <vector>
-#include <cassert>
+#include "util/check.hh"
#include <iostream>
#include "Util.h"
@@ -48,11 +48,11 @@ public:
}
const TranslationOption *Get(size_t ind) const {
- assert(ind < m_coll.size());
+ CHECK(ind < m_coll.size());
return m_coll[ind];
}
void Remove( size_t ind ) {
- assert(ind < m_coll.size());
+ CHECK(ind < m_coll.size());
m_coll.erase( m_coll.begin()+ind );
}
void Add(TranslationOption *transOpt) {
diff --git a/moses/src/TranslationSystem.cpp b/moses/src/TranslationSystem.cpp
index d0301667d..860e58975 100644
--- a/moses/src/TranslationSystem.cpp
+++ b/moses/src/TranslationSystem.cpp
@@ -162,7 +162,7 @@ namespace Moses {
}
float TranslationSystem::GetWeightDistortion() const {
- assert(m_distortionScoreProducer);
+ CHECK(m_distortionScoreProducer);
return StaticData::Instance().GetWeight(m_distortionScoreProducer);
}
diff --git a/moses/src/TreeInput.cpp b/moses/src/TreeInput.cpp
index a46d8409c..d6e0f1e78 100644
--- a/moses/src/TreeInput.cpp
+++ b/moses/src/TreeInput.cpp
@@ -149,7 +149,7 @@ bool TreeInput::ProcessAndStripXMLTags(string &line, std::vector<XMLParseOutput>
// specified translations -> vector of phrases
// multiple translations may be specified, separated by "||"
vector<string> altTexts = TokenizeMultiCharSeparator(ParseXmlTagAttribute(tagContent,"label"), "||");
- assert(altTexts.size() == 1);
+ CHECK(altTexts.size() == 1);
XMLParseOutput item(altTexts[0], range);
sourceLabels.push_back(item);
@@ -230,7 +230,7 @@ TranslationOptionCollection* TreeInput::CreateTranslationOptionCollection() cons
void TreeInput::AddChartLabel(size_t startPos, size_t endPos, const Word &label
, const std::vector<FactorType>& /* factorOrder */)
{
- assert(label.IsNonTerminal());
+ CHECK(label.IsNonTerminal());
SourceLabelOverlap overlapType = StaticData::Instance().GetSourceLabelOverlap();
NonTerminalSet &list = GetLabelSet(startPos, endPos);
@@ -273,7 +273,7 @@ std::ostream& operator<<(std::ostream &out, const TreeInput &input)
const Word &word = *iter;
out << "[" << startPos <<"," << endPos << "]="
<< word << "(" << word.IsNonTerminal() << ") ";
- assert(word.IsNonTerminal());
+ CHECK(word.IsNonTerminal());
}
}
}
diff --git a/moses/src/TreeInput.h b/moses/src/TreeInput.h
index f621ef0bd..a6242d97e 100644
--- a/moses/src/TreeInput.h
+++ b/moses/src/TreeInput.h
@@ -37,8 +37,7 @@ protected:
bool ProcessAndStripXMLTags(std::string &line, std::vector<XMLParseOutput> &sourceLabels);
public:
- TreeInput(FactorDirection direction)
- : Sentence(direction)
+ TreeInput()
{}
InputTypeEnum GetType() const {
diff --git a/moses/src/TrellisPath.cpp b/moses/src/TrellisPath.cpp
index f50d57af4..c73575b2c 100644
--- a/moses/src/TrellisPath.cpp
+++ b/moses/src/TrellisPath.cpp
@@ -173,7 +173,7 @@ void TrellisPath::CreateDeviantPaths(TrellisPathList &pathColl) const
Phrase TrellisPath::GetTargetPhrase() const
{
- Phrase targetPhrase(Output, ARRAY_SIZE_INCR);
+ Phrase targetPhrase(ARRAY_SIZE_INCR);
int numHypo = (int) m_path.size();
for (int node = numHypo - 2 ; node >= 0 ; --node) {
@@ -191,14 +191,14 @@ Phrase TrellisPath::GetSurfacePhrase() const
{
const std::vector<FactorType> &outputFactor = StaticData::Instance().GetOutputFactorOrder();
Phrase targetPhrase = GetTargetPhrase()
- ,ret(Output, targetPhrase.GetSize());
+ ,ret(targetPhrase.GetSize());
for (size_t pos = 0 ; pos < targetPhrase.GetSize() ; ++pos) {
Word &newWord = ret.AddWord();
for (size_t i = 0 ; i < outputFactor.size() ; i++) {
FactorType factorType = outputFactor[i];
const Factor *factor = targetPhrase.GetFactor(pos, factorType);
- assert(factor);
+ CHECK(factor);
newWord[factorType] = factor;
}
}
@@ -221,7 +221,7 @@ WordsRange TrellisPath::GetTargetWordsRange(const Hypothesis &hypo) const
}
// have to give a hypo in the trellis path, but u didn't.
- assert(false);
+ CHECK(false);
return WordsRange(NOT_FOUND, NOT_FOUND);
}
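
GetSurfacePhrase in the hunk above rebuilds the phrase from only the configured output factors, and the CHECK(factor) fires if a requested factor slot is empty. The sketch below illustrates that projection idea with invented names and a plain vector-of-strings word; it is not the Moses Word/Factor machinery.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

int main() {
  // A factored word as fixed slots: [surface, POS, lemma]; names are invented.
  std::vector<std::string> word(3);
  word[0] = "houses"; word[1] = "NNS"; word[2] = "house";

  // Output factor order: emit surface and lemma only.
  std::vector<std::size_t> outputFactors;
  outputFactors.push_back(0);
  outputFactors.push_back(2);

  std::vector<std::string> projected;
  for (std::size_t i = 0; i < outputFactors.size(); ++i) {
    const std::string &factor = word[outputFactors[i]];
    if (factor.empty()) {              // plays the role of CHECK(factor) above
      std::cerr << "missing factor\n";
      return 1;
    }
    projected.push_back(factor);
  }
  std::cout << projected[0] << "|" << projected[1] << "\n";  // houses|house
  return 0;
}
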
diff --git a/moses/src/TypeDef.h b/moses/src/TypeDef.h
index 859fc115a..bb439f5c9 100644
--- a/moses/src/TypeDef.h
+++ b/moses/src/TypeDef.h
@@ -72,36 +72,6 @@ const size_t DEFAULT_VERBOSE_LEVEL = 1;
// for those using autoconf/automake
#if HAVE_CONFIG_H
#include "config.h"
-
-//#define TRACE_ENABLE 1 // REMOVE after we figure this out
-
-#define LM_INTERNAL 1
-#define LM_REMOTE 1
-
-# ifdef HAVE_SRILM
-# define LM_SRI 1
-# else
-# undef LM_SRI
-# endif
-
-# ifdef HAVE_IRSTLM
-# define LM_IRST 1
-# endif
-
-# ifdef HAVE_RANDLM
-# define LM_RAND 1
-# endif
-
-# ifdef HAVE_ORLM
-# define LM_ORLM 1
-# endif
-
-# define LM_KEN 1
-
-# ifdef HAVE_DMAPLM
-# define LM_DMAP
-# endif
-
#endif
/////////////////////////////////////////////////
diff --git a/moses/src/Util.h b/moses/src/Util.h
index 2dce299b4..1b0638ec2 100644
--- a/moses/src/Util.h
+++ b/moses/src/Util.h
@@ -23,7 +23,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#define moses_Util_h
#include <iostream>
-#include <cassert>
+#include "util/check.hh"
#include <fstream>
#include <sstream>
#include <string>
@@ -285,7 +285,7 @@ inline float FloorScore(float logScore)
inline float CalcTranslationScore(const std::vector<float> &probVector,
const std::vector<float> &weightT)
{
- assert(weightT.size()==probVector.size());
+ CHECK(weightT.size()==probVector.size());
float rv=0.0;
for(float const *sb=&probVector[0],*se=sb+probVector.size(),*wb=&weightT[0];
sb!=se; ++sb, ++wb)
@@ -333,7 +333,7 @@ inline void ShrinkToFit(T& v)
{
if(v.capacity()>v.size())
T(v).swap(v);
- assert(v.capacity()==v.size());
+ CHECK(v.capacity()==v.size());
}
bool FileExists(const std::string& filePath);
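
Two of the Util.h helpers touched above are easy to illustrate in isolation: CalcTranslationScore is essentially an inner product of per-feature log scores with their weights, and ShrinkToFit relies on the pre-C++11 copy-and-swap idiom to drop excess vector capacity. The sketch below uses invented values and only mirrors the two idioms, not the Moses functions themselves.

#include <iostream>
#include <numeric>
#include <vector>

int main() {
  // Weighted score as an inner product of log-probabilities and weights.
  std::vector<float> logProbs;
  logProbs.push_back(-1.2f); logProbs.push_back(-0.4f);
  std::vector<float> weights;
  weights.push_back(0.5f); weights.push_back(0.3f);
  float score = std::inner_product(logProbs.begin(), logProbs.end(),
                                   weights.begin(), 0.0f);
  std::cout << "score = " << score << "\n";                // -0.72

  // Copy-and-swap shrink-to-fit, as in the ShrinkToFit template above.
  std::vector<float> v(100, 1.0f);
  v.resize(3);
  std::vector<float>(v).swap(v);                           // temporary has tight capacity
  std::cout << v.size() << " / " << v.capacity() << "\n";  // typically "3 / 3"
  return 0;
}
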
diff --git a/moses/src/Word.cpp b/moses/src/Word.cpp
index 62d55968f..bc0a8e120 100644
--- a/moses/src/Word.cpp
+++ b/moses/src/Word.cpp
@@ -67,7 +67,7 @@ void Word::Merge(const Word &sourceWord)
std::string Word::GetString(const vector<FactorType> factorType,bool endWithBlank) const
{
stringstream strme;
- assert(factorType.size() <= MAX_NUM_FACTORS);
+ CHECK(factorType.size() <= MAX_NUM_FACTORS);
const std::string& factorDelimiter = StaticData::Instance().GetFactorDelimiter();
bool firstPass = true;
for (unsigned int i = 0 ; i < factorType.size() ; i++) {
@@ -94,7 +94,7 @@ void Word::CreateFromString(FactorDirection direction
vector<string> wordVec;
Tokenize(wordVec, str, "|");
- assert(wordVec.size() == factorOrder.size());
+ CHECK(wordVec.size() == factorOrder.size());
const Factor *factor;
for (size_t ind = 0; ind < wordVec.size(); ++ind) {
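
Word::CreateFromString above keeps the invariant that a factored token supplies exactly as many '|'-separated fields as the configured factor order, hence the CHECK on the sizes. The sketch below shows that split-and-count step with a hand-rolled splitter and invented factor names; it does not use the Moses Tokenize helper.

#include <cstddef>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split a "surface|POS"-style factored token on '|'; hand-rolled for illustration.
static std::vector<std::string> SplitFactors(const std::string &token) {
  std::vector<std::string> out;
  std::stringstream ss(token);
  std::string piece;
  while (std::getline(ss, piece, '|')) out.push_back(piece);
  return out;
}

int main() {
  const std::size_t expectedFactors = 2;                    // e.g. surface + POS
  std::vector<std::string> fields = SplitFactors("haus|NN");
  // Mirrors the CHECK(wordVec.size() == factorOrder.size()) in the hunk above.
  if (fields.size() != expectedFactors) {
    std::cerr << "factor count mismatch\n";
    return 1;
  }
  std::cout << fields[0] << " / " << fields[1] << "\n";     // haus / NN
  return 0;
}
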
diff --git a/moses/src/WordLattice.h b/moses/src/WordLattice.h
index c10d7d54b..21df48054 100644
--- a/moses/src/WordLattice.h
+++ b/moses/src/WordLattice.h
@@ -39,7 +39,7 @@ public:
void GetAsEdgeMatrix(std::vector<std::vector<bool> >& edges) const;
const NonTerminalSet &GetLabelSet(size_t /*startPos*/, size_t /*endPos*/) const {
- assert(false);
+ CHECK(false);
return *(new NonTerminalSet());
}
diff --git a/moses/src/WordsBitmap.cpp b/moses/src/WordsBitmap.cpp
index 6351ec006..b3203145f 100644
--- a/moses/src/WordsBitmap.cpp
+++ b/moses/src/WordsBitmap.cpp
@@ -37,9 +37,9 @@ int WordsBitmap::GetFutureCosts(int lastPos) const
aip1 = (i+1==m_size || m_bitmap[i+1]);
#ifndef NDEBUG
- if( i>0 ) assert( aim1==(i==0||m_bitmap[i-1]==1));
- //assert( ai==a[i] );
- if( i+1<m_size ) assert( aip1==m_bitmap[i+1]);
+ if( i>0 ) CHECK( aim1==(i==0||m_bitmap[i-1]==1));
+ //CHECK( ai==a[i] );
+ if( i+1<m_size ) CHECK( aip1==m_bitmap[i+1]);
#endif
if((i==0||aim1)&&ai==0) {
sum+=abs(lastPos-static_cast<int>(i)+1);
@@ -52,7 +52,7 @@ int WordsBitmap::GetFutureCosts(int lastPos) const
// sum+=getJumpCosts(lastPos,as,maxJumpWidth);
sum+=abs(lastPos-static_cast<int>(m_size)+1); //getCosts(lastPos,as);
- assert(sum>=0);
+ CHECK(sum>=0);
// TRACE_ERR(sum<<"\n");
diff --git a/moses/src/WordsBitmap.h b/moses/src/WordsBitmap.h
index 0364c8f28..370120571 100644
--- a/moses/src/WordsBitmap.h
+++ b/moses/src/WordsBitmap.h
@@ -204,7 +204,7 @@ public:
//! converts bitmap into an integer ID: it consists of two parts: the first 16 bit are the pattern between the first gap and the last word-1, the second 16 bit are the number of filled positions. enforces a sentence length limit of 65535 and a max distortion of 16
WordsBitmapID GetID() const {
- assert(m_size < (1<<16));
+ CHECK(m_size < (1<<16));
size_t start = GetFirstGapPos();
if (start == NOT_FOUND) start = m_size; // nothing left
@@ -212,7 +212,7 @@ public:
size_t end = GetLastPos();
if (end == NOT_FOUND) end = 0; // nothing translated yet
- assert(end < start || end-start <= 16);
+ CHECK(end < start || end-start <= 16);
WordsBitmapID id = 0;
for(size_t pos = end; pos > start; pos--) {
id = id*2 + (int) GetValue(pos);
@@ -222,7 +222,7 @@ public:
//! converts bitmap into an integer ID, with an additional span covered
WordsBitmapID GetIDPlus( size_t startPos, size_t endPos ) const {
- assert(m_size < (1<<16));
+ CHECK(m_size < (1<<16));
size_t start = GetFirstGapPos();
if (start == NOT_FOUND) start = m_size; // nothing left
@@ -233,7 +233,7 @@ public:
if (start == startPos) start = endPos+1;
if (end < endPos) end = endPos;
- assert(end < start || end-start <= 16);
+ CHECK(end < start || end-start <= 16);
WordsBitmapID id = 0;
for(size_t pos = end; pos > start; pos--) {
id = id*2;
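
The comment above GetID describes packing the coverage bitmap into one integer: a bit pattern from the region between the first gap and the last covered word, plus a count of covered positions, which is why both methods check the 2^16 sentence-length limit and the 16-bit pattern width. The sketch below packs a toy bitmap in the same general way; the exact bit layout here is a simplification, not the Moses encoding.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  // Toy coverage bitmap: words 0, 1 and 4 are already translated.
  std::vector<bool> bitmap;
  bitmap.push_back(true);  bitmap.push_back(true);
  bitmap.push_back(false); bitmap.push_back(false);
  bitmap.push_back(true);

  const std::size_t firstGap = 2, lastCovered = 4, numCovered = 3;

  // Bit pattern over the region between the first gap and the last covered word.
  std::uint32_t pattern = 0;
  for (std::size_t pos = firstGap; pos <= lastCovered; ++pos)
    pattern = (pattern << 1) | (bitmap[pos] ? 1u : 0u);

  // Simplified packing: pattern in the high 16 bits, covered-word count in the low 16.
  const std::uint32_t id = (pattern << 16) | static_cast<std::uint32_t>(numCovered);
  std::cout << std::hex << "0x" << id << "\n";   // 0x10003 for this toy bitmap
  return 0;
}
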
diff --git a/moses/src/WordsRange.h b/moses/src/WordsRange.h
index 51667093d..7191d259e 100644
--- a/moses/src/WordsRange.h
+++ b/moses/src/WordsRange.h
@@ -75,7 +75,7 @@ public:
}
inline size_t GetNumWordsBetween(const WordsRange& x) const {
- assert(!Overlap(x));
+ CHECK(!Overlap(x));
if (x.m_endPos < m_startPos) {
return m_startPos - x.m_endPos;
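
GetNumWordsBetween above assumes the two ranges do not overlap, which is what the new CHECK(!Overlap(x)) enforces before the subtraction. Below is a tiny standalone version of the same two-case subtraction with an invented Range struct; it mirrors the formula in the diff rather than defining its exact semantics.

#include <cstddef>
#include <iostream>

struct Range { std::size_t start, end; };  // inclusive word positions; invented struct

// Distance between two non-overlapping ranges, mirroring the formula in the diff.
static std::size_t DistanceBetween(const Range &a, const Range &b) {
  if (b.end < a.start) return a.start - b.end;  // b lies entirely before a
  return b.start - a.end;                        // otherwise a lies before b
}

int main() {
  Range a; a.start = 5; a.end = 7;
  Range b; b.start = 1; b.end = 2;
  std::cout << DistanceBetween(a, b) << "\n";  // prints 3 (i.e. 5 - 2)
  return 0;
}
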
diff --git a/moses/src/XmlOption.cpp b/moses/src/XmlOption.cpp
index ad56dd353..3e5c20689 100644
--- a/moses/src/XmlOption.cpp
+++ b/moses/src/XmlOption.cpp
@@ -344,7 +344,7 @@ bool ProcessAndStripXMLTags(string &line, vector<XmlOption*> &res, ReorderingCon
targetPhrase.SetScore(scoreValue);
XmlOption *option = new XmlOption(range,targetPhrase);
- assert(option);
+ CHECK(option);
res.push_back(option);
}
diff --git a/regenerate-makefiles.sh b/regenerate-makefiles.sh
deleted file mode 100755
index baadeb0e5..000000000
--- a/regenerate-makefiles.sh
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/bin/bash
-
-# NOTE:
-# Versions 1.9 (or higher) of aclocal and automake are required.
-# And version >=2.60 of autoconf
-# And version >=1.4.7 of m4
-
-# For Mac OSX users:
-# Standard distribution usually includes versions 1.6.
-# Get versions 1.9 or higher
-# Set the following variable to the correct paths
-#ACLOCAL="/path/to/aclocal-1.9"
-#AUTOMAKE="/path/to/automake-1.9"
-
-function die () {
- echo "$@" >&2
-
- # Try to be as helpful as possible by detecting OS and making recommendations
- if (( $(lsb_release -a | fgrep -ci "ubuntu") > 0 )); then
- echo >&2
- echo >&2 "Need to install build autotools on Ubuntu? Use:"
- echo >&2 "sudo aptitude install autoconf automake libtool build-essential"
- fi
- if (( $(uname -a | fgrep -ci "darwin") > 0 )); then
- echo >&2
- echo >&2 "Having problems on Mac OSX?"
- echo >&2 "You might have an old version of aclocal/automake. You'll need to upgrade these."
- fi
- exit 1
-}
-
-if [ -z "$ACLOCAL" ]; then
- ACLOCAL=`which aclocal`
- [ -n "$ACLOCAL" ] || die "aclocal not found on your system. Please install it or set $ACLOCAL"
-fi
-
-if [ -z "$AUTOMAKE" ]; then
- AUTOMAKE=`which automake`
- [ -n "$AUTOMAKE" ] || die "automake not found on your system. Please install it or set $AUTOMAKE"
-fi
-
-if [ -z "$AUTOCONF" ]; then
- AUTOCONF=`which autoconf`
- [ -n "$AUTOCONF" ] || die "autoconf not found on your system. Please install it or set $AUTOCONF"
-fi
-
-if [ -z "$LIBTOOLIZE" ]; then
- LIBTOOLIZE=`which libtoolize`
-
- if [ -z "$LIBTOOLIZE" ]; then
- LIBTOOLIZE=`which glibtoolize`
- fi
-
- [ -n "$LIBTOOLIZE" ] || die "libtoolize/glibtoolize not found on your system. Please install it or set $LIBTOOLIZE"
-fi
-
-echo >&2 "Detected aclocal: $($ACLOCAL --version | head -n1)"
-echo >&2 "Detected autoconf: $($AUTOCONF --version | head -n1)"
-echo >&2 "Detected automake: $($AUTOMAKE --version | head -n1)"
-echo >&2 "Detected libtoolize: $($LIBTOOLIZE --version | head -n1)"
-
-echo "Calling $ACLOCAL..."
-$ACLOCAL -I m4 || die "aclocal failed"
-echo "Calling $AUTOCONF..."
-$AUTOCONF || die "autoconf failed"
-rm ltmain.sh 2>/dev/null
-touch ltmain.sh
-echo "Calling $AUTOMAKE..."
-$AUTOMAKE || die "automake failed"
-echo "Calling $LIBTOOLIZE"
-$LIBTOOLIZE || die "libtoolize failed"
-
-case `uname -s` in
- Darwin)
- cores=$(sysctl -n hw.ncpu)
- ;;
- Linux)
- cores=$(cat /proc/cpuinfo | fgrep -c processor)
- ;;
- *)
- echo "Unknown platform."
- cores=
- ;;
-esac
-
-if [ -z "$cores" ]; then
- cores=2 # assume 2 cores if we can't figure it out
- echo >&2 "Assuming 2 cores"
-else
- echo >&2 "Detected $cores cores"
-fi
-
-echo
-echo "You should now be able to configure and build:"
-echo " ./configure [--with-srilm=/path/to/srilm] [--with-irstlm=/path/to/irstlm] [--with-randlm=/path/to/randlm] [--with-synlm] [--with-xmlrpc-c=/path/to/xmlrpc-c-config]"
-echo " make -j ${cores}"
-echo
-
diff --git a/regression-testing/Jamfile b/regression-testing/Jamfile
new file mode 100644
index 000000000..0c00b516e
--- /dev/null
+++ b/regression-testing/Jamfile
@@ -0,0 +1,32 @@
+import option ;
+
+with-regtest = [ option.get "with-regtest" ] ;
+if $(with-regtest) {
+ path-constant TESTS : tests ;
+
+ rule reg_test ( name : tests * : program : action ) {
+ alias $(name) : $(tests:D=).passed ;
+ for test in $(tests) {
+ make $(test:D=).passed : $(program) : $(action) ;
+ alias $(test) : $(test:D=).passed ;
+ }
+ }
+
+ actions reg_test_decode {
+ $(TOP)/regression-testing/run-single-test.perl --decoder=$(>) --test=$(<:B) --data-dir=$(with-regtest) --test-dir=$(TESTS) && touch $(<)
+ }
+ reg_test phrase : [ glob tests/phrase.* ] : ../moses-cmd/src//moses : @reg_test_decode ;
+ reg_test chart : chart.target-syntax chart.target-syntax.ondisk chart.hierarchical chart.hierarchical-withsrilm chart.hierarchical.ondisk : ../moses-chart-cmd/src//moses_chart : @reg_test_decode ;
+
+ actions reg_test_score {
+ $(TOP)/regression-testing/run-test-scorer.perl --scorer=$(>) --test=$(<:B) --data-dir=$(with-regtest) --test-dir=$(TESTS) && touch $(<)
+ }
+ reg_test score : [ glob tests/score.* ] : ../scripts/training/phrase-extract//score : @reg_test_score ;
+
+ actions reg_test_mert {
+ $(TOP)/regression-testing/run-test-mert.perl --mert-dir=$(TOP)/mert --test=$(<:B) --data-dir=$(with-regtest) --test-dir=$(TESTS) && touch $(<)
+ }
+ reg_test mert : [ glob tests/mert.* ] : ../mert//legacy : @reg_test_mert ;
+
+ alias all : phrase chart score mert ;
+}
diff --git a/scripts/.cvsignore b/scripts/.cvsignore
deleted file mode 100644
index 0d20b6487..000000000
--- a/scripts/.cvsignore
+++ /dev/null
@@ -1 +0,0 @@
-*.pyc
diff --git a/scripts/Jamfile b/scripts/Jamfile
new file mode 100644
index 000000000..e33544d17
--- /dev/null
+++ b/scripts/Jamfile
@@ -0,0 +1,60 @@
+#See ../Jamroot for options.
+import option ;
+
+build-project ems/biconcor ;
+build-project training ;
+
+with-giza = [ option.get "with-giza" ] ;
+if $(with-giza) {
+ rule check-for-bin ( name ) {
+ if ! [ FILE_OPEN $(with-giza)/$(name) : "r" ] {
+ echo "Did not find "$(with-giza)/$(name)"." ;
+ echo "The with-giza directory should contain GIZA++, snt2cooc.out, and mkcls." ;
+ echo "These are available from http://code.google.com/p/giza-pp/ :" ;
+ echo " wget https://giza-pp.googlecode.com/files/giza-pp.tgz" ;
+ echo " tar xzf giza-pp.tgz" ;
+ echo " cd giza-pp" ;
+ echo " make" ;
+ echo " cp GIZA++-v2/{GIZA++,plain2snt.out,snt2cooc.out,snt2plain.out,trainGIZA++.sh} mkcls-v2/mkcls ." ;
+ exit "Then run bjam --with-giza=/path/to/giza-pp" : 1 ;
+ }
+ }
+
+ constant WITH-GIZA : $(with-giza) ;
+
+ check-for-bin GIZA++ ;
+ check-for-bin snt2cooc.out ;
+ check-for-bin mkcls ;
+} else {
+ if $(CLEANING) = no {
+ echo "If you want scripts/training/train-model.perl, pass --with-giza=/path/to/giza-pp" ;
+ }
+ constant WITH-GIZA : "no" ;
+}
+
+location = [ option.get "install-scripts" ] ;
+if $(location) {
+ location = $(location)$(GITTAG) ;
+
+ #These two used to live in a tools directory.
+ install ghkm : training/phrase-extract/extract-ghkm//extract-ghkm : <location>$(location)/training/phrase-extract/extract-ghkm/tools ;
+ install compactify : training/compact-rule-table//compactify : <location>$(location)/training/compact-rule-table/tools ;
+
+ install phrase-extract : training/phrase-extract//programs : <location>$(location)/training/phrase-extract ;
+ install lexical-reordering : training/lexical-reordering//score : <location>$(location)/training/lexical-reordering ;
+ install symal : training/symal//symal : <location>$(location)/training/symal ;
+
+ install biconcor : ems/biconcor//biconcor : <location>$(location)/ems/biconcor ;
+
+ if $(WITH-GIZA) != no {
+ install train-model : training//train-model.perl : <location>$(location)/training ;
+ }
+
+ install scripts :
+ [ glob-tree README *.js *.pl *.perl *.pm *.py *.sh *.php : tests regression-testing other bin train_model.perl ]
+ [ glob tokenizer/nonbreaking_prefixes/* ems/example/*.* ems/example/data/* ems/web/* analysis/smtgui/* : ems/web/javascripts ]
+ generic/fsa-sample.fsa
+ ems/experiment.machines
+ ems/experiment.meta
+ : <install-source-root>. <location>$(location) ;
+}
diff --git a/scripts/Makefile b/scripts/Makefile
deleted file mode 100644
index 862a88bae..000000000
--- a/scripts/Makefile
+++ /dev/null
@@ -1,189 +0,0 @@
-# This makefile is here to simplify the automatic releases (and tests!!!)
-# of the scripts
-
-
-TS?=$(shell date '+%Y%m%d-%H%M')
-DS?=$(shell date '+%Y%m%d')
-
-# Set TARGETDIR to directory where you want the compiled scripts to be copied
-# to.
-# Set BINDIR to the directory where GIZA++ and other tools are installed.
-TARGETDIR=/home/bhaddow/mira/code
-BINDIR=/home/bhaddow/mira/code/bin
-
-MAIN_SCRIPTS_TARGET_DIR=$(TARGETDIR)
-# MAIN_SCRIPTS_TARGET_DIR=$(shell echo `pwd`/temp)
-
-RELEASEDIR=$(TARGETDIR)/scripts-$(TS)
-# RELEASEDIR=$(shell echo `pwd`/temp)
-
-
-
-## Rules to compile parts that need compilation
-
-
-all: compile
-
-SUBDIRS=training/phrase-extract training/symal training/mbr training/lexical-reordering ems/biconcor
-SUBDIRS_CLEAN=$(SUBDIRS) training/memscore training/eppex training/compact-rule-table
-
-compile: compile-memscore compile-eppex compile-compact-rule-table
- touch release-exclude # No files excluded by default
- pwd=`pwd`; \
- for subdir in $(SUBDIRS); do \
- $(MAKE) -C $$subdir || exit 1; \
- echo "### Compiler $$subdir"; \
- cd $$pwd; \
- done
- ## All files that need compilation were compiled
-
-compile-memscore:
- # Building memscore may fail e.g. if boost is not available.
- # We ignore this because traditional scoring will still work and memscore isn't used by default.
- cd training/memscore ; \
- ./configure && $(MAKE) \
- || ( echo "WARNING: Building memscore failed."; \
- echo 'training/memscore/memscore' >> ../../release-exclude )
-
-compile-eppex:
- # Building eppex may fail e.g. if boost is not available.
- # We ignore this because traditional extraction will still work and eppex isn't used by default.
- cd training/eppex ; \
- ./configure && $(MAKE) \
- || ( echo "WARNING: Building eppex failed."; \
- echo 'training/eppex/counter' >> ../../release-exclude; \
- echo 'training/eppex/eppex' >> ../../release-exclude )
-
-compile-compact-rule-table:
- # Building the compact rule table tools may fail e.g. if boost is not available.
- # We ignore this because the tools aren't used by default.
- cd training/compact-rule-table ; \
- ./configure && $(MAKE) \
- || ( echo "WARNING: Building compact rule table tools failed."; \
- echo 'training/compact-rule-table/tools/compactify' >> ../../release-exclude )
-
-clean:
- pwd=`pwd`; \
- for subdir in $(SUBDIRS_CLEAN); do \
- $(MAKE) -C $$subdir clean || exit 1; \
- echo "### Compiler $$subdir"; \
- cd $$pwd; \
- done
- ## All files that need compilation were compiled
-
-
-### "MAIN" scripts are scripts that have a Philipp-like name, too
-## That is for each script (listed below in MAIN_SCRIPTS),
-## we create a date-stamped version in MAIN_SCRIPTS_TARGET_DIR
-
-MAIN_TRAINING_SCRIPTS_NAMES=filter-model-given-input.pl mert-moses-multi.pl mert-moses.pl train-model.perl clean-corpus-n.perl
-# Make trick to add directory name to all of them:
-MAIN_TRAINING_SCRIPTS=$(MAIN_TRAINING_SCRIPTS_NAMES:%=training/%)
-
-MAIN_GENERIC_SCRIPTS_NAMES= moses-parallel.pl
-# Make trick to add directory name to all of them:
-MAIN_GENERIC_SCRIPTS=$(MAIN_GENERIC_SCRIPTS_NAMES:%=generic/%)
-
-# the list of all scripts that should be released
-MAIN_SCRIPTS= $(MAIN_TRAINING_SCRIPTS) $(MAIN_GENERIC_SCRIPTS)
-
-
-release:
- # Compile the parts
- $(MAKE) all
- @./check-dependencies.pl "$(HOME)" "$(TARGETDIR)" "$(RELEASEDIR)" "$(BINDIR)"
- mkdir -p $(RELEASEDIR)
- cat ./released-files \
- | grep -v -x -f release-exclude \
- | rsync -r --files-from - . $(RELEASEDIR)/
- sed 's#^my \$$BINDIR\s*=.*#my \$$BINDIR="$(BINDIR)";#' training/train-model.perl > $(RELEASEDIR)/training/train-model.perl
- @echo "####### Do not forget to:" >> $(RELEASEDIR)/README
- @echo " export SCRIPTS_ROOTDIR=$(RELEASEDIR)" >> $(RELEASEDIR)/README
- ## Remember, only files listed in released-files are released!!
- ## Don't forget to set your SCRIPTS_ROOTDIR with:
- @echo " export SCRIPTS_ROOTDIR=$(RELEASEDIR)"
-
-generate_wrappers:
- ## And for each script, create/rewrite the daily release
- export TARGET
- @for s in $(MAIN_SCRIPTS); do \
- bn=`basename $$s`; \
- echo '#!/bin/bash' > $(MAIN_SCRIPTS_TARGET_DIR)/$$bn-$(DS) || exit 1; \
- echo "export SCRIPTS_ROOTDIR=$(RELEASEDIR); $(RELEASEDIR)/$$s "'"$$@"; exit $$?' >> $(MAIN_SCRIPTS_TARGET_DIR)/$$bn-$(DS) || exit 1; \
- chmod 775 $(MAIN_SCRIPTS_TARGET_DIR)/$$bn-$(DS); \
- done
-
-
-MOSESRELEASE=$(TARGETDIR)/moses.$(DS)
-## This is a handy goal to release moses binary, too
-releasemoses:
- if [ -z "$(TARGETDIR)" ]; then \
- echo "Please specify a TARGETDIR." ; \
- echo " For custom releases issue: "; \
- echo " TARGETDIR=$(HOME)/releases make releasemoses"; \
- echo " For official releases: "; \
- echo " TARGETDIR=/export/ws06osmt make releasemoses"; \
- exit 1; \
- fi
- if [ -e $(MOSESRELEASE) ]; then echo "Moses release exists! Not touching it! $(MOSESRELEASE)"; exit 1; fi
- if [ ! -e ../moses-cmd/src/moses ]; then echo "Moses (../moses-cmd/src/moses) does not exist, nothing to release"; ecit 1; fi
- if file ../moses-cmd/src/moses | grep -q 'dynamicall' ; then echo "Moses (../moses-cmd/src/moses) is dynamically linked, not releasing."; ecit 1; fi
- cp ../moses-cmd/src/moses $(MOSESRELEASE)
- ## Your current version of moses:
- @echo " $(MOSESRELEASE)"
-
-
-## This goal lists all files you might have wanted to release
-# but forgot to mention in released-files
-missed:
- ### These might be intended for release
- find . -type f \
- | grep -v '/CVS/' \
- | grep -v /tests/ \
- | sed 's/^\.\///' \
- | grep -F -x -v -f released-files
-
-
-### Tests, applicable only at JHU environment due to data dependencies
-export WORKSPACE=$(shell pwd)/../
-
-.PHONY: tests
-tests:
- export SCRIPTS_ROOTDIR=`pwd`; \
- cd tests; \
- ts=`date '+%Y%m%d-%H%M%N'`; \
- for test in *.test; do \
- mkdir $$test.$$ts; \
- cd $$test.$$ts; \
- echo "Running $$test in tests/$$test.$$ts"; \
- ../$$test > log 2>&1 || exit 1; \
- cd ..; \
- done
- ## All tests passed
-
-## Run just one test in the background
-tests/%.test.run: tests/%.test
- export SCRIPTS_ROOTDIR=`pwd`; \
- ts=`date '+%Y%m%d-%H%M%N'`; \
- cd tests; \
- test=$*.test; \
- mkdir $$test.$$ts; \
- cd $$test.$$ts; \
- echo "Running $$test in tests/$$test.$$ts"; \
- ( nohup ../$$test > log 2>&1 & ) || exit 1; \
- echo "Observe tests/$$test.$$ts/log"; \
- cd ..
-
-## Run just one test in the foreground
-tests/%.test.runfg: tests/%.test
- export SCRIPTS_ROOTDIR=`pwd`; \
- ts=`date '+%Y%m%d-%H%M%N'`; \
- cd tests; \
- test=$*.test; \
- mkdir $$test.$$ts; \
- cd $$test.$$ts; \
- echo "Running $$test in tests/$$test.$$ts"; \
- ../$$test 2>&1 | tee log ; \
- echo "Log saved to tests/$$test.$$ts/log"; \
- cd ..
-
diff --git a/scripts/README b/scripts/README
index 609352bcc..35dac9dd0 100644
--- a/scripts/README
+++ b/scripts/README
@@ -8,7 +8,7 @@ This directory should contain all multi-purpose scripts for:
- lib ... perl modules used by various scripts
-The Makefile then takes care of proper 'release' from your CVS directory to
+The Jamfile then takes care of proper 'release' from your git directory to
the shared directories.
The released scripts should remain in the *same directory structure*.
diff --git a/scripts/check-dependencies.pl b/scripts/check-dependencies.pl
deleted file mode 100755
index e93520c8d..000000000
--- a/scripts/check-dependencies.pl
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/perl
-
-# $Id$
-
-my ($home, $target_dir, $release_dir, $bin_dir) = @ARGV;
-
-#print "HOME: $home\nTARGET_DIR: $target_dir\nRELEASE_DIR: $release_dir\n";
-
-if ($target_dir eq '' || -z $target_dir) {
- print <<EOT;
-Please specify a TARGETDIR.
-
- For development releases you probably want the following:
- TARGETDIR=$home/releases make release
-
- For shared environments, you will want to set TARGETDIR to
- some appropriately common directory.
-
-EOT
- exit 1;
-}
-
-if (-e $release_dir) {
- print "Targetdir exists! Not touching it! $release_dir";
- exit 1;
-}
-
-unless (-x "$bin_dir/GIZA++" && -x "$bin_dir/snt2cooc.out" && -x "$bin_dir/mkcls" ) {
- print <<EOT;
-Please specify a BINDIR.
-
- The BINDIR directory must contain GIZA++, snt2cooc.out and mkcls executables.
- These are available from http://www.fjoch.com/GIZA++.html and
- http://www-i6.informatik.rwth-aachen.de/Colleagues/och/software/mkcls.html .
-EOT
- exit 1;
-}
-
-
-exit 0;
-
diff --git a/scripts/ems/biconcor/Jamfile b/scripts/ems/biconcor/Jamfile
new file mode 100644
index 000000000..003193067
--- /dev/null
+++ b/scripts/ems/biconcor/Jamfile
@@ -0,0 +1,3 @@
+exe biconcor : Vocabulary.cpp SuffixArray.cpp TargetCorpus.cpp Alignment.cpp Mismatch.cpp PhrasePair.cpp PhrasePairCollection.cpp biconcor.cpp base64.cpp ;
+
+install legacy : biconcor : <location>. ;
diff --git a/scripts/ems/biconcor/Makefile b/scripts/ems/biconcor/Makefile
deleted file mode 100644
index 3b2aa9636..000000000
--- a/scripts/ems/biconcor/Makefile
+++ /dev/null
@@ -1,10 +0,0 @@
-all: biconcor
-
-clean:
- rm -f *.o
-
-.cpp.o:
- g++ -O6 -g -c $<
-
-biconcor: Vocabulary.o SuffixArray.o TargetCorpus.o Alignment.o Mismatch.o PhrasePair.o PhrasePairCollection.o biconcor.o base64.o
- g++ Vocabulary.o SuffixArray.o TargetCorpus.o Alignment.o Mismatch.o PhrasePair.o PhrasePairCollection.o biconcor.o base64.o -o biconcor
diff --git a/scripts/ems/example/config.basic b/scripts/ems/example/config.basic
index a8bfbfdd3..fb8d62c74 100644
--- a/scripts/ems/example/config.basic
+++ b/scripts/ems/example/config.basic
@@ -30,13 +30,13 @@ wmt10-data = $working-dir/data
### basic tools
#
# moses decoder
-decoder = $moses-src-dir/moses-cmd/src/moses
+decoder = $moses-src-dir/dist/bin/moses
# conversion of phrase table into binary on-disk format
-ttable-binarizer = $moses-src-dir/misc/processPhraseTable
+ttable-binarizer = $moses-src-dir/dist/bin/processPhraseTable
# conversion of rule table into binary on-disk format
-#ttable-binarizer = "$moses-src-dir/CreateOnDisk/src/CreateOnDiskPt 1 1 5 100 2"
+#ttable-binarizer = "$moses-src-dir/dist/bin/CreateOnDiskPt 1 1 5 100 2"
# tokenizers - comment out if all your data is already tokenized
input-tokenizer = "$moses-script-dir/tokenizer/tokenizer.perl -a -l $input-extension"
@@ -141,7 +141,7 @@ order = 5
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
#
@@ -222,7 +222,7 @@ tuning-sgm = $wmt10-data/dev/news-test2008-ref.$output-extension.sgm
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
diff --git a/scripts/ems/example/config.factored b/scripts/ems/example/config.factored
index 5bbe72a66..0fb072c66 100644
--- a/scripts/ems/example/config.factored
+++ b/scripts/ems/example/config.factored
@@ -30,13 +30,13 @@ wmt10-data = $working-dir/data
### basic tools
#
# moses decoder
-decoder = $moses-src-dir/moses-cmd/src/moses
+decoder = $moses-src-dir/dist/bin/moses
# conversion of phrase table into binary on-disk format
ttable-binarizer = $moses-src-dir/misc/processPhraseTable
# conversion of rule table into binary on-disk format
-#ttable-binarizer = "$moses-src-dir/CreateOnDisk/src/CreateOnDiskPt 1 1 5 100 2"
+#ttable-binarizer = "$moses-src-dir/dist/bin/CreateOnDiskPt 1 1 5 100 2"
# tokenizers - comment out if all your data is already tokenized
input-tokenizer = "$moses-script-dir/tokenizer/tokenizer.perl -a -l $input-extension"
@@ -141,7 +141,7 @@ order = 5
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
@@ -221,7 +221,7 @@ tuning-sgm = $wmt10-data/dev/news-test2008-ref.$output-extension.sgm
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
diff --git a/scripts/ems/example/config.hierarchical b/scripts/ems/example/config.hierarchical
index 7f2e8a5fd..9284726c7 100644
--- a/scripts/ems/example/config.hierarchical
+++ b/scripts/ems/example/config.hierarchical
@@ -30,13 +30,13 @@ wmt10-data = $working-dir/data
### basic tools
#
# moses decoder
-decoder = $moses-src-dir/moses-chart-cmd/src/moses_chart
+decoder = $moses-src-dir/dist/bin/moses_chart
# conversion of phrase table into binary on-disk format
-#ttable-binarizer = $moses-src-dir/misc/processPhraseTable
+#ttable-binarizer = $moses-src-dir/dist/bin/processPhraseTable
# conversion of rule table into binary on-disk format
-ttable-binarizer = "$moses-src-dir/CreateOnDisk/src/CreateOnDiskPt 1 1 5 100 2"
+ttable-binarizer = "$moses-src-dir/dist/bin/CreateOnDiskPt 1 1 5 100 2"
# tokenizers - comment out if all your data is already tokenized
input-tokenizer = "$moses-script-dir/tokenizer/tokenizer.perl -a -l $input-extension"
@@ -141,7 +141,7 @@ order = 5
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
@@ -216,7 +216,7 @@ tuning-sgm = $wmt10-data/dev/news-test2008-ref.$output-extension.sgm
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
diff --git a/scripts/ems/example/config.syntax b/scripts/ems/example/config.syntax
index 7d25ed1c2..085e210a5 100644
--- a/scripts/ems/example/config.syntax
+++ b/scripts/ems/example/config.syntax
@@ -30,13 +30,13 @@ wmt10-data = $working-dir/data
### basic tools
#
# moses decoder
-decoder = $moses-src-dir/moses-chart-cmd/src/moses_chart
+decoder = $moses-src-dir/dist/bin/moses_chart
# conversion of phrase table into binary on-disk format
-#ttable-binarizer = $moses-src-dir/misc/processPhraseTable
+#ttable-binarizer = $moses-src-dir/dist/bin/processPhraseTable
# conversion of rule table into binary on-disk format
-ttable-binarizer = "$moses-src-dir/CreateOnDisk/src/CreateOnDiskPt 1 1 5 100 2"
+ttable-binarizer = "$moses-src-dir/dist/bin/CreateOnDiskPt 1 1 5 100 2"
# tokenizers - comment out if all your data is already tokenized
input-tokenizer = "$moses-script-dir/tokenizer/tokenizer.perl -a -l $input-extension"
@@ -145,7 +145,7 @@ order = 5
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
@@ -220,7 +220,7 @@ tuning-sgm = $wmt10-data/dev/news-test2008-ref.$output-extension.sgm
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
diff --git a/scripts/ems/example/config.toy b/scripts/ems/example/config.toy
index 5aa4945a2..59753a50e 100644
--- a/scripts/ems/example/config.toy
+++ b/scripts/ems/example/config.toy
@@ -30,13 +30,13 @@ toy-data = $moses-script-dir/ems/example/data
### basic tools
#
# moses decoder
-decoder = $moses-src-dir/moses-cmd/src/moses
+decoder = $moses-src-dir/dist/bin/moses
# conversion of phrase table into binary on-disk format
-ttable-binarizer = $moses-src-dir/misc/processPhraseTable
+ttable-binarizer = $moses-src-dir/dist/bin/processPhraseTable
# conversion of rule table into binary on-disk format
-#ttable-binarizer = "$moses-src-dir/CreateOnDisk/src/CreateOnDiskPt 1 1 5 100 2"
+#ttable-binarizer = "$moses-src-dir/dist/bin/CreateOnDiskPt 1 1 5 100 2"
# tokenizers - comment out if all your data is already tokenized
input-tokenizer = "$moses-script-dir/tokenizer/tokenizer.perl -a -l $input-extension"
@@ -135,7 +135,7 @@ order = 5
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
@@ -200,7 +200,7 @@ raw-corpus = $toy-data/nc-5k.$output-extension
#lm-binarizer = $moses-src-dir/irstlm/bin/compile-lm
# kenlm, also set type to 8
-#lm-binarizer = $moses-src-dir/kenlm/build_binary
+#lm-binarizer = $moses-src-dir/dist/bin/build_binary
#type = 8
### script to create quantized language model format (irstlm)
diff --git a/scripts/ems/experiment.perl b/scripts/ems/experiment.perl
index 19216fe90..a2f7e6200 100755
--- a/scripts/ems/experiment.perl
+++ b/scripts/ems/experiment.perl
@@ -1662,6 +1662,10 @@ sub define_training_extract_phrases {
my $unknown_word_label = &versionize(&long_file_name("unknown-word-label","model",""));
$cmd .= "-unknown-word-label $unknown_word_label ";
}
+
+ if (&get("TRAINING:use-ghkm")) {
+ $cmd .= "-ghkm ";
+ }
}
diff --git a/scripts/ems/support/reuse-weights.perl b/scripts/ems/support/reuse-weights.perl
index b798b04cc..b64c81fa9 100755
--- a/scripts/ems/support/reuse-weights.perl
+++ b/scripts/ems/support/reuse-weights.perl
@@ -22,7 +22,7 @@ while(<WEIGHT>) {
elsif (/^\[weight\-(\S+)\]/) {
$current_weight = $1;
}
- elsif ($current_weight && /^([\-\d\.e]+)\s*$/) {
+ elsif ($current_weight && /^([\-\d\.]+)([Ee][+-]?[\d]+)?$/) {
push @{$WEIGHT{$current_weight}},$1;
}
elsif ($weights_file_flag && !/^\[/ && !/^\s*$/) {
@@ -50,7 +50,7 @@ while(<STDIN>) {
}
}
}
- elsif ($current_weight && /^([\-\d\.]+)\s*$/) {
+ elsif ($current_weight && /^([\-\d\.]+)([Ee][+-]?[\d]+)?$/) {
$IGNORE{$current_weight}++;
}
elsif (/^\[/) {
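
The reuse-weights.perl hunks above widen the weight-line pattern so values printed in scientific notation (for example 3.2e-05) are still recognized as weights. For a rough feel of the widened pattern, here is a small C++11 std::regex sketch of an equivalent match; the sample strings are invented and the Perl script's capture handling is not reproduced.

#include <iostream>
#include <regex>
#include <string>

int main() {
  // Roughly the widened pattern from the diff: a mantissa plus an optional exponent.
  const std::regex weightLine("^([-\\d.]+)([Ee][+-]?\\d+)?$");
  const std::string samples[] = { "0.3", "-1.25", "3.2e-05", "not-a-weight" };
  for (int i = 0; i < 4; ++i) {
    std::smatch m;
    std::cout << samples[i] << " -> "
              << (std::regex_match(samples[i], m, weightLine) ? "weight" : "skip")
              << "\n";
  }
  return 0;
}
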
diff --git a/scripts/recaser/train-recaser.perl b/scripts/recaser/train-recaser.perl
index 49ee508bb..a5a707554 100755
--- a/scripts/recaser/train-recaser.perl
+++ b/scripts/recaser/train-recaser.perl
@@ -8,28 +8,74 @@ binmode(STDIN, ":utf8");
binmode(STDOUT, ":utf8");
# apply switches
-my ($DIR,$CORPUS,$SCRIPTS_ROOT_DIR,$CONFIG);
+my ($DIR,$CORPUS,$SCRIPTS_ROOT_DIR,$CONFIG,$HELP,$ERROR);
+my $LM = "SRILM"; # SRILM is default.
+my $BUILD_LM = "build-lm.sh";
my $NGRAM_COUNT = "ngram-count";
my $TRAIN_SCRIPT = "train-factored-phrase-model.perl";
my $MAX_LEN = 1;
my $FIRST_STEP = 1;
my $LAST_STEP = 11;
-die("train-recaser.perl --dir recaser --corpus cased")
+$ERROR = "training Aborted."
unless &GetOptions('first-step=i' => \$FIRST_STEP,
'last-step=i' => \$LAST_STEP,
'corpus=s' => \$CORPUS,
'config=s' => \$CONFIG,
- 'dir=s' => \$DIR,
- 'ngram-count=s' => \$NGRAM_COUNT,
- 'train-script=s' => \$TRAIN_SCRIPT,
- 'scripts-root-dir=s' => \$SCRIPTS_ROOT_DIR,
- 'max-len=i' => \$MAX_LEN);
+ 'dir=s' => \$DIR,
+ 'ngram-count=s' => \$NGRAM_COUNT,
+ 'build-lm=s' => \$BUILD_LM,
+ 'lm=s' => \$LM,
+ 'train-script=s' => \$TRAIN_SCRIPT,
+ 'scripts-root-dir=s' => \$SCRIPTS_ROOT_DIR,
+ 'max-len=i' => \$MAX_LEN,
+ 'help' => \$HELP);
# check and set default to unset parameters
-die("please specify working dir --dir") unless defined($DIR);
-die("please specify --corpus") if !defined($CORPUS)
+$ERROR = "please specify working dir --dir" unless defined($DIR) || defined($HELP);
+$ERROR = "please specify --corpus" if !defined($CORPUS) && !defined($HELP)
&& $FIRST_STEP <= 2 && $LAST_STEP >= 1;
+if ($HELP || $ERROR) {
+ if ($ERROR) {
+ print STDERR "ERROR: " . $ERROR . "\n";
+ }
+ print STDERR "Usage: $0 --dir /output/recaser --corpus /Cased/corpus/files [options ...]";
+
+ print STDERR "\n\nOptions:
+ == MANDATORY ==
+ --dir=dir ... output directory for the recaser.
+ --corpus=file ... input cased corpus.
+
+ == OPTIONAL ==
+ = Recaser Training configuration =
+ --train-script=file ... path to the train script (default: train-factored-phrase-model.perl in \$PATH).
+ --config=config ... training script configuration.
+ --scripts-root-dir=dir ... scripts directory.
+ --max-len=int ... max phrase length (default: 1).
+
+ = Language Model Training configuration =
+ --lm=[IRSTLM,SRILM] ... language model (default: SRILM).
+ --build-lm=file ... path to build-lm.sh if not in \$PATH (used only with --lm=IRSTLM).
+ --ngram-count=file ... path to ngram-count.sh if not in \$PATH (used only with --lm=SRILM).
+
+ = Steps this script will perform =
+ (1) Truecasing (disabled);
+ (2) Language Model Training;
+ (3) Data Preparation
+ (4-10) Recaser Model Training;
+ (11) Cleanup.
+ --first-step=[1-11] ... step where script starts (default: 1).
+ --last-step=[1-11] ... step where script ends (default: 11).
+
+ --help ... this usage output.\n";
+ if ($ERROR) {
+ exit(1);
+ }
+ else {
+ exit(0);
+ }
+}
+
# main loop
`mkdir -p $DIR`;
&truecase() if 0 && $FIRST_STEP == 1;
@@ -46,9 +92,17 @@ sub truecase {
sub train_lm {
print STDERR "(2) Train language model on cased data @ ".`date`;
- my $cmd = "$NGRAM_COUNT -text $CORPUS -lm $DIR/cased.srilm.gz -interpolate -kndiscount";
+ my $cmd = "";
+ if (uc $LM eq "IRSTLM") {
+ $cmd = "$BUILD_LM -t /tmp -i $CORPUS -n 3 -o $DIR/cased.irstlm.gz";
+ }
+ else {
+ $LM = "SRILM";
+ $cmd = "$NGRAM_COUNT -text $CORPUS -lm $DIR/cased.srilm.gz -interpolate -kndiscount";
+ }
+ print STDERR "** Using $LM **" . "\n";
print STDERR $cmd."\n";
- print STDERR `$cmd`;
+ system($cmd) == 0 || die("Language model training failed with error " . ($? >> 8) . "\n");
}
sub prepare_data {
@@ -88,16 +142,28 @@ sub train_recase_model {
my $first = $FIRST_STEP;
$first = 4 if $first < 4;
print STDERR "\n(4) Training recasing model @ ".`date`;
- my $cmd = "$TRAIN_SCRIPT --root-dir $DIR --model-dir $DIR --first-step $first --alignment a --corpus $DIR/aligned --f lowercased --e cased --max-phrase-length $MAX_LEN --lm 0:3:$DIR/cased.srilm.gz:0";
+ my $cmd = "$TRAIN_SCRIPT --root-dir $DIR --model-dir $DIR --first-step $first --alignment a --corpus $DIR/aligned --f lowercased --e cased --max-phrase-length $MAX_LEN";
+ if (uc $LM eq "IRSTLM") {
+ $cmd .= " --lm 0:3:$DIR/cased.irstlm.gz:1";
+ }
+ else {
+ $cmd .= " --lm 0:3:$DIR/cased.srilm.gz:0";
+ }
$cmd .= " -scripts-root-dir $SCRIPTS_ROOT_DIR" if $SCRIPTS_ROOT_DIR;
$cmd .= " -config $CONFIG" if $CONFIG;
print STDERR $cmd."\n";
- print STDERR `$cmd`;
+ system($cmd) == 0 || die("Recaser model training failed with error " . ($? >> 8) . "\n");
}
sub cleanup {
print STDERR "\n(11) Cleaning up @ ".`date`;
`rm -f $DIR/extract*`;
+ my $clean_1 = $?;
`rm -f $DIR/aligned*`;
+ my $clean_2 = $?;
`rm -f $DIR/lex*`;
+ my $clean_3 = $?;
+ if ($clean_1 + $clean_2 + $clean_3 != 0) {
+ print STDERR "Training successful but some files could not be cleaned.\n";
+ }
}
diff --git a/scripts/released-files b/scripts/released-files
deleted file mode 100644
index cb6eef944..000000000
--- a/scripts/released-files
+++ /dev/null
@@ -1,111 +0,0 @@
-analysis/README
-analysis/sentence-by-sentence.pl
-ems/experiment.machines
-ems/experiment.meta
-ems/experiment.perl
-ems/example/config.basic
-ems/example/config.factored
-ems/example/config.hierarchical
-ems/example/config.syntax
-ems/example/config.toy
-ems/example/data/nc-5k.en
-ems/example/data/nc-5k.fr
-ems/example/data/test-ref.en.sgm
-ems/example/data/test-src.fr.sgm
-ems/support/analysis.perl
-ems/support/berkeley-process.sh
-ems/support/berkeley-train.sh
-ems/support/consolidate-training-data.perl
-ems/support/generic-multicore-parallelizer.perl
-ems/support/generic-parallelizer.perl
-ems/support/input-from-sgm.perl
-ems/support/interpolate-lm.perl
-ems/support/reference-from-sgm.perl
-ems/support/remove-segmenation-markup.perl
-ems/support/report-experiment-scores.perl
-ems/support/reuse-weights.perl
-ems/support/run-command-on-multiple-refsets.perl
-ems/support/wrap-xml.perl
-ems/web/analysis.php
-ems/web/analysis_diff.php
-ems/web/comment.php
-ems/web/diff.php
-ems/web/index.php
-ems/web/lib.php
-ems/web/overview.php
-ems/web/setup
-ems/web/javascripts/builder.js
-ems/web/javascripts/controls.js
-ems/web/javascripts/dragdrop.js
-ems/web/javascripts/effects.js
-ems/web/javascripts/prototype.js
-ems/web/javascripts/scriptaculous.js
-ems/web/javascripts/slider.js
-ems/web/javascripts/sound.js
-ems/web/javascripts/unittest.js
-generic/compound-splitter.perl
-generic/extract-factors.pl
-generic/lopar2pos.pl
-generic/moses-parallel.pl
-generic/mteval-v12.pl
-generic/multi-bleu.perl
-generic/qsub-wrapper.pl
-Makefile
-README
-released-files
-tokenizer/detokenizer.perl
-tokenizer/tokenizer.perl
-tokenizer/lowercase.perl
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.ru
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.ro
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.de
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.fr
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.el
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.is
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.pt
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.nl
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.it
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.sl
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.sk
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.pl
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.sv
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.es
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.en
-tokenizer/nonbreaking_prefixes/nonbreaking_prefix.ca
-training/absolutize_moses_model.pl
-training/build-generation-table.perl
-training/clean-corpus-n.perl
-training/clone_moses_model.pl
-training/compact-rule-table/tools/compactify
-training/eppex/counter
-training/eppex/eppex
-training/mbr/mbr
-training/corpus-sizes.perl
-training/filter-model-given-input.pl
-training/filter-rule-table.py
-training/lexical-reordering/score
-training/memscore/memscore
-training/zmert-moses.pl
-training/mert-moses.pl
-training/mert-moses-multi.pl
-training/phrase-extract/extract
-training/phrase-extract/extract-rules
-training/phrase-extract/score
-training/phrase-extract/consolidate
-training/postprocess-lopar.perl
-training/reduce_combine.pl
-training/combine_factors.pl
-training/train-model.perl
-training/symal/symal
-training/symal/giza2bal.pl
-training/wrappers/parse-de-bitpar.perl
-training/wrappers/parse-en-collins.perl
-training/wrappers/make-factor-en-pos.mxpost.perl
-training/wrappers/make-factor-pos.tree-tagger.perl
-training/wrappers/make-factor-stem.perl
-recaser/train-recaser.perl
-recaser/recase.perl
-recaser/truecase.perl
-recaser/detruecase.perl
-recaser/train-truecaser.perl
-
diff --git a/scripts/tokenizer/nonbreaking_prefixes/README.txt b/scripts/tokenizer/nonbreaking_prefixes/README.txt
new file mode 100644
index 000000000..02cdfccb9
--- /dev/null
+++ b/scripts/tokenizer/nonbreaking_prefixes/README.txt
@@ -0,0 +1,5 @@
+The language suffix can be found here:
+
+http://www.loc.gov/standards/iso639-2/php/code_list.php
+
+
diff --git a/scripts/training/Jamfile b/scripts/training/Jamfile
new file mode 100644
index 000000000..8ca408d07
--- /dev/null
+++ b/scripts/training/Jamfile
@@ -0,0 +1,14 @@
+build-project compact-rule-table ;
+build-project phrase-extract ;
+build-project lexical-reordering ;
+build-project symal ;
+
+if $(WITH-GIZA) != no || $(CLEANING) != no {
+ make train-model.perl : train-model.perl.missing_bin_dir : @missing_bin_dir ;
+ actions missing_bin_dir {
+ sed 's#^my \$BINDIR\s*=.*#my\ \$BINDIR=\"$(WITH-GIZA)\";#' $(>) >$(<)
+ chmod +x $(<)
+ }
+
+ install legacy : train-model.perl : <location>. ;
+}
diff --git a/scripts/training/compact-rule-table/tools/Compactify.cpp b/scripts/training/compact-rule-table/Compactify.cpp
index ceb7eb090..ceb7eb090 100644
--- a/scripts/training/compact-rule-table/tools/Compactify.cpp
+++ b/scripts/training/compact-rule-table/Compactify.cpp
diff --git a/scripts/training/compact-rule-table/tools/Compactify.h b/scripts/training/compact-rule-table/Compactify.h
index eb9febbba..eb9febbba 100644
--- a/scripts/training/compact-rule-table/tools/Compactify.h
+++ b/scripts/training/compact-rule-table/Compactify.h
diff --git a/scripts/training/compact-rule-table/tools/Compactify_Main.cpp b/scripts/training/compact-rule-table/Compactify_Main.cpp
index 5ae8833c1..5ae8833c1 100644
--- a/scripts/training/compact-rule-table/tools/Compactify_Main.cpp
+++ b/scripts/training/compact-rule-table/Compactify_Main.cpp
diff --git a/scripts/training/compact-rule-table/tools/Exception.h b/scripts/training/compact-rule-table/Exception.h
index 3868fcec5..3868fcec5 100644
--- a/scripts/training/compact-rule-table/tools/Exception.h
+++ b/scripts/training/compact-rule-table/Exception.h
diff --git a/scripts/training/compact-rule-table/Jamfile b/scripts/training/compact-rule-table/Jamfile
new file mode 100644
index 000000000..09f45d454
--- /dev/null
+++ b/scripts/training/compact-rule-table/Jamfile
@@ -0,0 +1,3 @@
+exe compactify : [ glob *.cpp ] ../../..//boost_program_options ;
+
+install tools : compactify : <install-type>EXE ;
diff --git a/scripts/training/compact-rule-table/Makefile.am b/scripts/training/compact-rule-table/Makefile.am
deleted file mode 100644
index 88c1c68cc..000000000
--- a/scripts/training/compact-rule-table/Makefile.am
+++ /dev/null
@@ -1,3 +0,0 @@
-ACLOCAL_AMFLAGS = -I m4
-
-SUBDIRS = tools
diff --git a/scripts/training/compact-rule-table/tools/NumberedSet.h b/scripts/training/compact-rule-table/NumberedSet.h
index 72c01cb30..72c01cb30 100644
--- a/scripts/training/compact-rule-table/tools/NumberedSet.h
+++ b/scripts/training/compact-rule-table/NumberedSet.h
diff --git a/scripts/training/compact-rule-table/tools/Options.h b/scripts/training/compact-rule-table/Options.h
index f3fdb9139..f3fdb9139 100644
--- a/scripts/training/compact-rule-table/tools/Options.h
+++ b/scripts/training/compact-rule-table/Options.h
diff --git a/scripts/training/compact-rule-table/tools/RuleTableParser.cpp b/scripts/training/compact-rule-table/RuleTableParser.cpp
index 6272079fd..6272079fd 100644
--- a/scripts/training/compact-rule-table/tools/RuleTableParser.cpp
+++ b/scripts/training/compact-rule-table/RuleTableParser.cpp
diff --git a/scripts/training/compact-rule-table/tools/RuleTableParser.h b/scripts/training/compact-rule-table/RuleTableParser.h
index 5599e63de..5599e63de 100644
--- a/scripts/training/compact-rule-table/tools/RuleTableParser.h
+++ b/scripts/training/compact-rule-table/RuleTableParser.h
diff --git a/scripts/training/compact-rule-table/tools/Tool.h b/scripts/training/compact-rule-table/Tool.h
index 4a7e1e0e1..4a7e1e0e1 100644
--- a/scripts/training/compact-rule-table/tools/Tool.h
+++ b/scripts/training/compact-rule-table/Tool.h
diff --git a/scripts/training/compact-rule-table/aclocal.m4 b/scripts/training/compact-rule-table/aclocal.m4
deleted file mode 100644
index d5a8a1a47..000000000
--- a/scripts/training/compact-rule-table/aclocal.m4
+++ /dev/null
@@ -1,1056 +0,0 @@
-# generated automatically by aclocal 1.11.1 -*- Autoconf -*-
-
-# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
-# 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
-# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-# PARTICULAR PURPOSE.
-
-m4_ifndef([AC_AUTOCONF_VERSION],
- [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
-m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.68],,
-[m4_warning([this file was generated for autoconf 2.68.
-You have another version of autoconf. It may work, but is not guaranteed to.
-If you have problems, you may need to regenerate the build system entirely.
-To do so, use the procedure documented by the package, typically `autoreconf'.])])
-
-# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*-
-#
-# Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
-# Written by Scott James Remnant, 2004.
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-
-# serial 5 lt~obsolete.m4
-
-# These exist entirely to fool aclocal when bootstrapping libtool.
-#
-# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
-# which have later been changed to m4_define as they aren't part of the
-# exported API, or moved to Autoconf or Automake where they belong.
-#
-# The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN
-# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
-# using a macro with the same name in our local m4/libtool.m4 it'll
-# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
-# and doesn't know about Autoconf macros at all.)
-#
-# So we provide this file, which has a silly filename so it's always
-# included after everything else. This provides aclocal with the
-# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
-# because those macros already exist, or will be overwritten later.
-# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
-#
-# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
-# Yes, that means every name once taken will need to remain here until
-# we give up compatibility with versions before 1.7, at which point
-# we need to keep only those names which we still refer to.
-
-# This is to help aclocal find these macros, as it can't see m4_define.
-AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
-
-m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
-m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])])
-m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
-m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])])
-m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
-m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])])
-m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])])
-m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
-m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])])
-m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])])
-m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])])
-m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
-m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
-m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
-m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
-m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
-m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])])
-m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
-m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
-m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])])
-m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])])
-m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
-m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
-m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
-m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
-m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
-m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
-m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
-m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])])
-m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])])
-m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])])
-m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
-m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
-m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])])
-m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])])
-m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
-m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
-m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])])
-m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
-m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])])
-m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])])
-m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])])
-m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
-m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
-m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
-m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
-m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
-m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
-m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])])
-m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
-m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS], [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
-m4_ifndef([_LT_AC_PROG_CXXCPP], [AC_DEFUN([_LT_AC_PROG_CXXCPP])])
-m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS], [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
-m4_ifndef([_LT_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
-m4_ifndef([_LT_PROG_F77], [AC_DEFUN([_LT_PROG_F77])])
-m4_ifndef([_LT_PROG_FC], [AC_DEFUN([_LT_PROG_FC])])
-m4_ifndef([_LT_PROG_CXX], [AC_DEFUN([_LT_PROG_CXX])])
-
-# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# AM_AUTOMAKE_VERSION(VERSION)
-# ----------------------------
-# Automake X.Y traces this macro to ensure aclocal.m4 has been
-# generated from the m4 files accompanying Automake X.Y.
-# (This private macro should not be called outside this file.)
-AC_DEFUN([AM_AUTOMAKE_VERSION],
-[am__api_version='1.11'
-dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
-dnl require some minimum version. Point them to the right macro.
-m4_if([$1], [1.11.1], [],
- [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
-])
-
-# _AM_AUTOCONF_VERSION(VERSION)
-# -----------------------------
-# aclocal traces this macro to find the Autoconf version.
-# This is a private macro too. Using m4_define simplifies
-# the logic in aclocal, which can simply ignore this definition.
-m4_define([_AM_AUTOCONF_VERSION], [])
-
-# AM_SET_CURRENT_AUTOMAKE_VERSION
-# -------------------------------
-# Call AM_AUTOMAKE_VERSION and _AM_AUTOCONF_VERSION so they can be traced.
-# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
-AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
-[AM_AUTOMAKE_VERSION([1.11.1])dnl
-m4_ifndef([AC_AUTOCONF_VERSION],
- [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
-_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
-
-# AM_AUX_DIR_EXPAND -*- Autoconf -*-
-
-# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
-# $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to
-# `$srcdir', `$srcdir/..', or `$srcdir/../..'.
-#
-# Of course, Automake must honor this variable whenever it calls a
-# tool from the auxiliary directory. The problem is that $srcdir (and
-# therefore $ac_aux_dir as well) can be either absolute or relative,
-# depending on how configure is run. This is pretty annoying, since
-# it makes $ac_aux_dir quite unusable in subdirectories: in the top
-# source directory, any form will work fine, but in subdirectories a
-# relative path needs to be adjusted first.
-#
-# $ac_aux_dir/missing
-# fails when called from a subdirectory if $ac_aux_dir is relative
-# $top_srcdir/$ac_aux_dir/missing
-# fails if $ac_aux_dir is absolute,
-# fails when called from a subdirectory in a VPATH build with
-# a relative $ac_aux_dir
-#
-# The reason for the latter failure is that $top_srcdir and $ac_aux_dir
-# are both prefixed by $srcdir. In an in-source build this is usually
-# harmless because $srcdir is `.', but things will break when you
-# start a VPATH build or use an absolute $srcdir.
-#
-# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
-# iff we strip the leading $srcdir from $ac_aux_dir. That would be:
-# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
-# and then we would define $MISSING as
-# MISSING="\${SHELL} $am_aux_dir/missing"
-# This will work as long as MISSING is not called from configure, because
-# unfortunately $(top_srcdir) has no meaning in configure.
-# However there are other variables, like CC, which are often used in
-# configure, and could therefore not use this "fixed" $ac_aux_dir.
-#
-# Another solution, used here, is to always expand $ac_aux_dir to an
-# absolute PATH. The drawback is that using absolute paths prevents a
-# configured tree from being moved without reconfiguration.
-
-AC_DEFUN([AM_AUX_DIR_EXPAND],
-[dnl Rely on autoconf to set up CDPATH properly.
-AC_PREREQ([2.50])dnl
-# expand $ac_aux_dir to an absolute path
-am_aux_dir=`cd $ac_aux_dir && pwd`
-])
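
A minimal sketch of the expansion described above, with a hypothetical project that keeps its auxiliary scripts in build-aux (AM_AUX_DIR_EXPAND is normally pulled in via AC_REQUIRE by other Automake macros; the paths are only illustrative):

    # configure.ac (hypothetical layout)
    AC_CONFIG_AUX_DIR([build-aux])
    # after AM_AUX_DIR_EXPAND has run:
    #   ac_aux_dir -> build-aux                       (may be relative)
    #   am_aux_dir -> /abs/path/to/project/build-aux  (always absolute)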
-
-# AM_CONDITIONAL -*- Autoconf -*-
-
-# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008
-# Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 9
-
-# AM_CONDITIONAL(NAME, SHELL-CONDITION)
-# -------------------------------------
-# Define a conditional.
-AC_DEFUN([AM_CONDITIONAL],
-[AC_PREREQ(2.52)dnl
- ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])],
- [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
-AC_SUBST([$1_TRUE])dnl
-AC_SUBST([$1_FALSE])dnl
-_AM_SUBST_NOTMAKE([$1_TRUE])dnl
-_AM_SUBST_NOTMAKE([$1_FALSE])dnl
-m4_define([_AM_COND_VALUE_$1], [$2])dnl
-if $2; then
- $1_TRUE=
- $1_FALSE='#'
-else
- $1_TRUE='#'
- $1_FALSE=
-fi
-AC_CONFIG_COMMANDS_PRE(
-[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
- AC_MSG_ERROR([[conditional "$1" was never defined.
-Usually this means the macro was only invoked conditionally.]])
-fi])])
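
A minimal usage sketch of AM_CONDITIONAL, with hypothetical option and variable names (this mirrors standard Automake practice, not anything specific to this tree):

    # configure.ac
    AC_ARG_ENABLE([debug], [  --enable-debug    build with extra debugging code])
    AM_CONDITIONAL([DEBUG], [test "x$enable_debug" = xyes])

    # Makefile.am
    if DEBUG
    AM_CPPFLAGS = -DDEBUG
    endif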
-
-# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009
-# Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 10
-
-# There are a few dirty hacks below to avoid letting `AC_PROG_CC' be
-# written in clear, in which case automake, when reading aclocal.m4,
-# will think it sees a *use*, and therefore will trigger all its
-# C support machinery. Also note that it means that autoscan, seeing
-# CC etc. in the Makefile, will ask for an AC_PROG_CC use...
-
-
-# _AM_DEPENDENCIES(NAME)
-# ----------------------
-# See how the compiler implements dependency checking.
-# NAME is "CC", "CXX", "GCJ", or "OBJC".
-# We try a few techniques and use that to set a single cache variable.
-#
-# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
-# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
-# dependency, and given that the user is not expected to run this macro,
-# just rely on AC_PROG_CC.
-AC_DEFUN([_AM_DEPENDENCIES],
-[AC_REQUIRE([AM_SET_DEPDIR])dnl
-AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
-AC_REQUIRE([AM_MAKE_INCLUDE])dnl
-AC_REQUIRE([AM_DEP_TRACK])dnl
-
-ifelse([$1], CC, [depcc="$CC" am_compiler_list=],
- [$1], CXX, [depcc="$CXX" am_compiler_list=],
- [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
- [$1], UPC, [depcc="$UPC" am_compiler_list=],
- [$1], GCJ, [depcc="$GCJ" am_compiler_list='gcc3 gcc'],
- [depcc="$$1" am_compiler_list=])
-
-AC_CACHE_CHECK([dependency style of $depcc],
- [am_cv_$1_dependencies_compiler_type],
-[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
- # We make a subdir and do the tests there. Otherwise we can end up
- # making bogus files that we don't know about and never remove. For
- # instance it was reported that on HP-UX the gcc test will end up
- # making a dummy file named `D' -- because `-MD' means `put the output
- # in D'.
- mkdir conftest.dir
- # Copy depcomp to subdir because otherwise we won't find it if we're
- # using a relative directory.
- cp "$am_depcomp" conftest.dir
- cd conftest.dir
- # We will build objects and dependencies in a subdirectory because
- # it helps to detect inapplicable dependency modes. For instance
- # both Tru64's cc and ICC support -MD to output dependencies as a
- # side effect of compilation, but ICC will put the dependencies in
- # the current directory while Tru64 will put them in the object
- # directory.
- mkdir sub
-
- am_cv_$1_dependencies_compiler_type=none
- if test "$am_compiler_list" = ""; then
- am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
- fi
- am__universal=false
- m4_case([$1], [CC],
- [case " $depcc " in #(
- *\ -arch\ *\ -arch\ *) am__universal=true ;;
- esac],
- [CXX],
- [case " $depcc " in #(
- *\ -arch\ *\ -arch\ *) am__universal=true ;;
- esac])
-
- for depmode in $am_compiler_list; do
-    # Set up a source with many dependencies, because some compilers
- # like to wrap large dependency lists on column 80 (with \), and
- # we should not choose a depcomp mode which is confused by this.
- #
- # We need to recreate these files for each test, as the compiler may
- # overwrite some of them when testing with obscure command lines.
- # This happens at least with the AIX C compiler.
- : > sub/conftest.c
- for i in 1 2 3 4 5 6; do
- echo '#include "conftst'$i'.h"' >> sub/conftest.c
- # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
- # Solaris 8's {/usr,}/bin/sh.
- touch sub/conftst$i.h
- done
- echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
-
- # We check with `-c' and `-o' for the sake of the "dashmstdout"
- # mode. It turns out that the SunPro C++ compiler does not properly
- # handle `-M -o', and we need to detect this. Also, some Intel
-    # versions had trouble with output in subdirs.
- am__obj=sub/conftest.${OBJEXT-o}
- am__minus_obj="-o $am__obj"
- case $depmode in
- gcc)
- # This depmode causes a compiler race in universal mode.
- test "$am__universal" = false || continue
- ;;
- nosideeffect)
- # after this tag, mechanisms are not by side-effect, so they'll
- # only be used when explicitly requested
- if test "x$enable_dependency_tracking" = xyes; then
- continue
- else
- break
- fi
- ;;
- msvisualcpp | msvcmsys)
- # This compiler won't grok `-c -o', but also, the minuso test has
- # not run yet. These depmodes are late enough in the game, and
- # so weak that their functioning should not be impacted.
- am__obj=conftest.${OBJEXT-o}
- am__minus_obj=
- ;;
- none) break ;;
- esac
- if depmode=$depmode \
- source=sub/conftest.c object=$am__obj \
- depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
- $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
- >/dev/null 2>conftest.err &&
- grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
- grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
- grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
- ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
- # icc doesn't choke on unknown options, it will just issue warnings
- # or remarks (even with -Werror). So we grep stderr for any message
- # that says an option was ignored or not supported.
- # When given -MP, icc 7.0 and 7.1 complain thusly:
- # icc: Command line warning: ignoring option '-M'; no argument required
- # The diagnosis changed in icc 8.0:
- # icc: Command line remark: option '-MP' not supported
- if (grep 'ignoring option' conftest.err ||
- grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
- am_cv_$1_dependencies_compiler_type=$depmode
- break
- fi
- fi
- done
-
- cd ..
- rm -rf conftest.dir
-else
- am_cv_$1_dependencies_compiler_type=none
-fi
-])
-AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
-AM_CONDITIONAL([am__fastdep$1], [
- test "x$enable_dependency_tracking" != xno \
- && test "$am_cv_$1_dependencies_compiler_type" = gcc3])
-])
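
The cache variable computed above is substituted as e.g. CCDEPMODE via the AC_SUBST just after the cache check. Roughly, the probe exercises depcomp the way the conftest invocation above shows; a sketch with hypothetical file names, assuming the gcc3 mode was detected:

    depmode=gcc3 source=foo.c object=foo.o \
      depfile=.deps/foo.Po tmpdepfile=.deps/foo.TPo \
      $SHELL ./depcomp gcc -c -o foo.o foo.c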
-
-
-# AM_SET_DEPDIR
-# -------------
-# Choose a directory name for dependency files.
-# This macro is AC_REQUIREd in _AM_DEPENDENCIES
-AC_DEFUN([AM_SET_DEPDIR],
-[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
-AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
-])
-
-
-# AM_DEP_TRACK
-# ------------
-AC_DEFUN([AM_DEP_TRACK],
-[AC_ARG_ENABLE(dependency-tracking,
-[ --disable-dependency-tracking speeds up one-time build
- --enable-dependency-tracking do not reject slow dependency extractors])
-if test "x$enable_dependency_tracking" != xno; then
- am_depcomp="$ac_aux_dir/depcomp"
- AMDEPBACKSLASH='\'
-fi
-AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
-AC_SUBST([AMDEPBACKSLASH])dnl
-_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
-])
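
In practice the switch registered above is simply passed on the configure command line; for a one-off build where rebuild dependencies are not needed, the faster path is roughly:

    ./configure --disable-dependency-tracking
    make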
-
-# Generate code to set up dependency tracking. -*- Autoconf -*-
-
-# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2008
-# Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-#serial 5
-
-# _AM_OUTPUT_DEPENDENCY_COMMANDS
-# ------------------------------
-AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
-[{
- # Autoconf 2.62 quotes --file arguments for eval, but not when files
- # are listed without --file. Let's play safe and only enable the eval
- # if we detect the quoting.
- case $CONFIG_FILES in
- *\'*) eval set x "$CONFIG_FILES" ;;
- *) set x $CONFIG_FILES ;;
- esac
- shift
- for mf
- do
- # Strip MF so we end up with the name of the file.
- mf=`echo "$mf" | sed -e 's/:.*$//'`
- # Check whether this is an Automake generated Makefile or not.
- # We used to match only the files named `Makefile.in', but
- # some people rename them; so instead we look at the file content.
- # Grep'ing the first line is not enough: some people post-process
- # each Makefile.in and add a new line on top of each file to say so.
- # Grep'ing the whole file is not good either: AIX grep has a line
-    # limit of 2048, but all seds we know of understand at least 4000.
- if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
- dirpart=`AS_DIRNAME("$mf")`
- else
- continue
- fi
- # Extract the definition of DEPDIR, am__include, and am__quote
- # from the Makefile without running `make'.
- DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
- test -z "$DEPDIR" && continue
- am__include=`sed -n 's/^am__include = //p' < "$mf"`
-    test -z "$am__include" && continue
- am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
- # When using ansi2knr, U may be empty or an underscore; expand it
- U=`sed -n 's/^U = //p' < "$mf"`
-    # Find all dependency output files; they are included files with
- # $(DEPDIR) in their names. We invoke sed twice because it is the
- # simplest approach to changing $(DEPDIR) to its actual value in the
- # expansion.
- for file in `sed -n "
- s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
- sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
- # Make sure the directory exists.
- test -f "$dirpart/$file" && continue
- fdir=`AS_DIRNAME(["$file"])`
- AS_MKDIR_P([$dirpart/$fdir])
- # echo "creating $dirpart/$file"
- echo '# dummy' > "$dirpart/$file"
- done
- done
-}
-])# _AM_OUTPUT_DEPENDENCY_COMMANDS
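
For orientation, the sed extraction above expects Makefile fragments of roughly this shape (the .Po file name is hypothetical); each matching include line results in one dummy dependency file being created:

    DEPDIR = .deps
    am__include = include
    am__quote =
    include ./$(DEPDIR)/foo.Po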
-
-
-# AM_OUTPUT_DEPENDENCY_COMMANDS
-# -----------------------------
-# This macro should only be invoked once -- use via AC_REQUIRE.
-#
-# This code is only required when automatic dependency tracking
-# is enabled. FIXME. This creates each `.P' file that we will
-# need in order to bootstrap the dependency handling code.
-AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
-[AC_CONFIG_COMMANDS([depfiles],
- [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
- [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
-])
-
-# Do all the work for Automake. -*- Autoconf -*-
-
-# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
-# 2005, 2006, 2008, 2009 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 16
-
-# This macro actually does too much. Some checks are only needed if
-# your package does certain things. But this isn't really a big deal.
-
-# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
-# AM_INIT_AUTOMAKE([OPTIONS])
-# -----------------------------------------------
-# The call with PACKAGE and VERSION arguments is the old style
-# call (pre autoconf-2.50), which is being phased out. PACKAGE
-# and VERSION should now be passed to AC_INIT and removed from
-# the call to AM_INIT_AUTOMAKE.
-# We support both call styles for the transition. After
-# the next Automake release, Autoconf can make the AC_INIT
-# arguments mandatory, and then we can depend on a new Autoconf
-# release and drop the old call support.
-AC_DEFUN([AM_INIT_AUTOMAKE],
-[AC_PREREQ([2.62])dnl
-dnl Autoconf wants to disallow AM_ names. We explicitly allow
-dnl the ones we care about.
-m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
-AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
-AC_REQUIRE([AC_PROG_INSTALL])dnl
-if test "`cd $srcdir && pwd`" != "`pwd`"; then
- # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
- # is not polluted with repeated "-I."
- AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
- # test to see if srcdir already configured
- if test -f $srcdir/config.status; then
- AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
- fi
-fi
-
-# test whether we have cygpath
-if test -z "$CYGPATH_W"; then
- if (cygpath --version) >/dev/null 2>/dev/null; then
- CYGPATH_W='cygpath -w'
- else
- CYGPATH_W=echo
- fi
-fi
-AC_SUBST([CYGPATH_W])
-
-# Define the identity of the package.
-dnl Distinguish between old-style and new-style calls.
-m4_ifval([$2],
-[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
- AC_SUBST([PACKAGE], [$1])dnl
- AC_SUBST([VERSION], [$2])],
-[_AM_SET_OPTIONS([$1])dnl
-dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
-m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,,
- [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
- AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
- AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl
-
-_AM_IF_OPTION([no-define],,
-[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package])
- AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl
-
-# Some tools Automake needs.
-AC_REQUIRE([AM_SANITY_CHECK])dnl
-AC_REQUIRE([AC_ARG_PROGRAM])dnl
-AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version})
-AM_MISSING_PROG(AUTOCONF, autoconf)
-AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version})
-AM_MISSING_PROG(AUTOHEADER, autoheader)
-AM_MISSING_PROG(MAKEINFO, makeinfo)
-AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
-AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
-AC_REQUIRE([AM_PROG_MKDIR_P])dnl
-# We need awk for the "check" target. The system "awk" is bad on
-# some platforms.
-AC_REQUIRE([AC_PROG_AWK])dnl
-AC_REQUIRE([AC_PROG_MAKE_SET])dnl
-AC_REQUIRE([AM_SET_LEADING_DOT])dnl
-_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
- [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
- [_AM_PROG_TAR([v7])])])
-_AM_IF_OPTION([no-dependencies],,
-[AC_PROVIDE_IFELSE([AC_PROG_CC],
- [_AM_DEPENDENCIES(CC)],
- [define([AC_PROG_CC],
- defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl
-AC_PROVIDE_IFELSE([AC_PROG_CXX],
- [_AM_DEPENDENCIES(CXX)],
- [define([AC_PROG_CXX],
- defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl
-AC_PROVIDE_IFELSE([AC_PROG_OBJC],
- [_AM_DEPENDENCIES(OBJC)],
- [define([AC_PROG_OBJC],
- defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl
-])
-_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl
-dnl The `parallel-tests' driver may need to know about EXEEXT, so add the
-dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro
-dnl is hooked onto _AC_COMPILER_EXEEXT early, see below.
-AC_CONFIG_COMMANDS_PRE(dnl
-[m4_provide_if([_AM_COMPILER_EXEEXT],
- [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl
-])
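
The two calling conventions distinguished above look roughly like this in a configure.ac (package name, version, and options are placeholders):

    # old style (pre autoconf-2.50), being phased out
    AM_INIT_AUTOMAKE([mypackage], [1.0])

    # new style: package and version go to AC_INIT, options to AM_INIT_AUTOMAKE
    AC_INIT([mypackage], [1.0])
    AM_INIT_AUTOMAKE([foreign tar-ustar])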
-
-dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not
-dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
-dnl mangled by Autoconf and run in a shell conditional statement.
-m4_define([_AC_COMPILER_EXEEXT],
-m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])
-
-
-# When config.status generates a header, we must update the stamp-h file.
-# This file resides in the same directory as the config header
-# that is generated. The stamp files are numbered to have different names.
-
-# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
-# loop where config.status creates the headers, so we can generate
-# our stamp files there.
-AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
-[# Compute $1's index in $config_headers.
-_am_arg=$1
-_am_stamp_count=1
-for _am_header in $config_headers :; do
- case $_am_header in
- $_am_arg | $_am_arg:* )
- break ;;
- * )
- _am_stamp_count=`expr $_am_stamp_count + 1` ;;
- esac
-done
-echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
-
-# Copyright (C) 2001, 2003, 2005, 2008 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# AM_PROG_INSTALL_SH
-# ------------------
-# Define $install_sh.
-AC_DEFUN([AM_PROG_INSTALL_SH],
-[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
-if test x"${install_sh}" != xset; then
- case $am_aux_dir in
- *\ * | *\ *)
- install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
- *)
- install_sh="\${SHELL} $am_aux_dir/install-sh"
- esac
-fi
-AC_SUBST(install_sh)])
-
-# Copyright (C) 2003, 2005 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 2
-
-# Check whether the underlying file-system supports filenames
-# with a leading dot. For instance MS-DOS doesn't.
-AC_DEFUN([AM_SET_LEADING_DOT],
-[rm -rf .tst 2>/dev/null
-mkdir .tst 2>/dev/null
-if test -d .tst; then
- am__leading_dot=.
-else
- am__leading_dot=_
-fi
-rmdir .tst 2>/dev/null
-AC_SUBST([am__leading_dot])])
-
-# Check to see how 'make' treats includes. -*- Autoconf -*-
-
-# Copyright (C) 2001, 2002, 2003, 2005, 2009 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 4
-
-# AM_MAKE_INCLUDE()
-# -----------------
-# Check to see how make treats includes.
-AC_DEFUN([AM_MAKE_INCLUDE],
-[am_make=${MAKE-make}
-cat > confinc << 'END'
-am__doit:
- @echo this is the am__doit target
-.PHONY: am__doit
-END
-# If we don't find an include directive, just comment out the code.
-AC_MSG_CHECKING([for style of include used by $am_make])
-am__include="#"
-am__quote=
-_am_result=none
-# First try GNU make style include.
-echo "include confinc" > confmf
-# Ignore all kinds of additional output from `make'.
-case `$am_make -s -f confmf 2> /dev/null` in #(
-*the\ am__doit\ target*)
- am__include=include
- am__quote=
- _am_result=GNU
- ;;
-esac
-# Now try BSD make style include.
-if test "$am__include" = "#"; then
- echo '.include "confinc"' > confmf
- case `$am_make -s -f confmf 2> /dev/null` in #(
- *the\ am__doit\ target*)
- am__include=.include
- am__quote="\""
- _am_result=BSD
- ;;
- esac
-fi
-AC_SUBST([am__include])
-AC_SUBST([am__quote])
-AC_MSG_RESULT([$_am_result])
-rm -f confinc confmf
-])
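
The two include styles probed above correspond to the following Makefile syntax (the fragment name is a placeholder):

    # GNU make style  (am__include=include, am__quote is empty)
    include fragment.mk

    # BSD make style  (am__include=.include, am__quote is a double quote)
    .include "fragment.mk"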
-
-# Fake the existence of programs that GNU maintainers use. -*- Autoconf -*-
-
-# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008
-# Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 6
-
-# AM_MISSING_PROG(NAME, PROGRAM)
-# ------------------------------
-AC_DEFUN([AM_MISSING_PROG],
-[AC_REQUIRE([AM_MISSING_HAS_RUN])
-$1=${$1-"${am_missing_run}$2"}
-AC_SUBST($1)])
-
-
-# AM_MISSING_HAS_RUN
-# ------------------
-# Define MISSING if not defined so far and test if it supports --run.
-# If it does, set am_missing_run to use it, otherwise, to nothing.
-AC_DEFUN([AM_MISSING_HAS_RUN],
-[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
-AC_REQUIRE_AUX_FILE([missing])dnl
-if test x"${MISSING+set}" != xset; then
- case $am_aux_dir in
- *\ * | *\ *)
- MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
- *)
- MISSING="\${SHELL} $am_aux_dir/missing" ;;
- esac
-fi
-# Use eval to expand $SHELL
-if eval "$MISSING --run true"; then
- am_missing_run="$MISSING --run "
-else
- am_missing_run=
- AC_MSG_WARN([`missing' script is too old or missing])
-fi
-])
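
Roughly, after a call such as AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) above, the substituted value ends up looking like this when `missing --run' works (the path is illustrative):

    ACLOCAL='${SHELL} /abs/path/to/missing --run aclocal-1.11'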
-
-# Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# AM_PROG_MKDIR_P
-# ---------------
-# Check for `mkdir -p'.
-AC_DEFUN([AM_PROG_MKDIR_P],
-[AC_PREREQ([2.60])dnl
-AC_REQUIRE([AC_PROG_MKDIR_P])dnl
-dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P,
-dnl while keeping a definition of mkdir_p for backward compatibility.
-dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile.
-dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of
-dnl Makefile.ins that do not define MKDIR_P, so we do our own
-dnl adjustment using top_builddir (which is defined more often than
-dnl MKDIR_P).
-AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl
-case $mkdir_p in
- [[\\/$]]* | ?:[[\\/]]*) ;;
- */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
-esac
-])
-
-# Helper functions for option handling. -*- Autoconf -*-
-
-# Copyright (C) 2001, 2002, 2003, 2005, 2008 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 4
-
-# _AM_MANGLE_OPTION(NAME)
-# -----------------------
-AC_DEFUN([_AM_MANGLE_OPTION],
-[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])
-
-# _AM_SET_OPTION(NAME)
-# ------------------------------
-# Set option NAME. Presently that only means defining a flag for this option.
-AC_DEFUN([_AM_SET_OPTION],
-[m4_define(_AM_MANGLE_OPTION([$1]), 1)])
-
-# _AM_SET_OPTIONS(OPTIONS)
-# ----------------------------------
-# OPTIONS is a space-separated list of Automake options.
-AC_DEFUN([_AM_SET_OPTIONS],
-[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])
-
-# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
-# -------------------------------------------
-# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
-AC_DEFUN([_AM_IF_OPTION],
-[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
-
-# Check to make sure that the build environment is sane. -*- Autoconf -*-
-
-# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008
-# Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 5
-
-# AM_SANITY_CHECK
-# ---------------
-AC_DEFUN([AM_SANITY_CHECK],
-[AC_MSG_CHECKING([whether build environment is sane])
-# Just in case
-sleep 1
-echo timestamp > conftest.file
-# Reject unsafe characters in $srcdir or the absolute working directory
-# name. Accept space and tab only in the latter.
-am_lf='
-'
-case `pwd` in
- *[[\\\"\#\$\&\'\`$am_lf]]*)
- AC_MSG_ERROR([unsafe absolute working directory name]);;
-esac
-case $srcdir in
- *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*)
- AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);;
-esac
-
-# Do `set' in a subshell so we don't clobber the current shell's
-# arguments. Must try -L first in case configure is actually a
-# symlink; some systems play weird games with the mod time of symlinks
-# (eg FreeBSD returns the mod time of the symlink's containing
-# directory).
-if (
- set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
- if test "$[*]" = "X"; then
- # -L didn't work.
- set X `ls -t "$srcdir/configure" conftest.file`
- fi
- rm -f conftest.file
- if test "$[*]" != "X $srcdir/configure conftest.file" \
- && test "$[*]" != "X conftest.file $srcdir/configure"; then
-
- # If neither matched, then we have a broken ls. This can happen
- # if, for instance, CONFIG_SHELL is bash and it inherits a
- # broken ls alias from the environment. This has actually
- # happened. Such a system could not be considered "sane".
- AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken
-alias in your environment])
- fi
-
- test "$[2]" = conftest.file
- )
-then
- # Ok.
- :
-else
- AC_MSG_ERROR([newly created file is older than distributed files!
-Check your system clock])
-fi
-AC_MSG_RESULT(yes)])
-
-# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# AM_PROG_INSTALL_STRIP
-# ---------------------
-# One issue with vendor `install' (even GNU) is that you can't
-# specify the program used to strip binaries. This is especially
-# annoying in cross-compiling environments, where the build's strip
-# is unlikely to handle the host's binaries.
-# Fortunately install-sh will honor a STRIPPROG variable, so we
-# always use install-sh in `make install-strip', and initialize
-# STRIPPROG with the value of the STRIP variable (set by the user).
-AC_DEFUN([AM_PROG_INSTALL_STRIP],
-[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
-# Installed binaries are usually stripped using `strip' when the user
-# runs `make install-strip'. However, `strip' might not be the right
-# tool to use in cross-compilation environments; therefore Automake
-# will honor the `STRIP' environment variable to overrule this program.
-dnl Don't test for $cross_compiling = yes, because it might be `maybe'.
-if test "$cross_compiling" != no; then
- AC_CHECK_TOOL([STRIP], [strip], :)
-fi
-INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
-AC_SUBST([INSTALL_STRIP_PROGRAM])])
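
A hedged sketch of the behaviour described above: in a cross build, the host strip can be supplied at configure time so that install-sh strips with the right tool (the target triplet and tool name are only examples):

    ./configure --host=arm-linux-gnueabi STRIP=arm-linux-gnueabi-strip
    make install-strip    # uses $(install_sh) -c -s, which honours STRIPPROG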
-
-# Copyright (C) 2006, 2008 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 2
-
-# _AM_SUBST_NOTMAKE(VARIABLE)
-# ---------------------------
-# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
-# This macro is traced by Automake.
-AC_DEFUN([_AM_SUBST_NOTMAKE])
-
-# AM_SUBST_NOTMAKE(VARIABLE)
-# ---------------------------
-# Public sister of _AM_SUBST_NOTMAKE.
-AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
-
-# Check how to create a tarball. -*- Autoconf -*-
-
-# Copyright (C) 2004, 2005 Free Software Foundation, Inc.
-#
-# This file is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# serial 2
-
-# _AM_PROG_TAR(FORMAT)
-# --------------------
-# Check how to create a tarball in format FORMAT.
-# FORMAT should be one of `v7', `ustar', or `pax'.
-#
-# Substitute a variable $(am__tar) that is a command
-# writing to stdout a FORMAT-tarball containing the directory
-# $tardir.
-# tardir=directory && $(am__tar) > result.tar
-#
-# Substitute a variable $(am__untar) that extracts such
-# a tarball read from stdin.
-# $(am__untar) < result.tar
-AC_DEFUN([_AM_PROG_TAR],
-[# Always define AMTAR for backward compatibility.
-AM_MISSING_PROG([AMTAR], [tar])
-m4_if([$1], [v7],
- [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'],
- [m4_case([$1], [ustar],, [pax],,
- [m4_fatal([Unknown tar format])])
-AC_MSG_CHECKING([how to create a $1 tar archive])
-# Loop over all known methods to create a tar archive until one works.
-_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none'
-_am_tools=${am_cv_prog_tar_$1-$_am_tools}
-# Do not fold the above two lines into one, because Tru64 sh and
-# Solaris sh will not grok spaces in the rhs of `-'.
-for _am_tool in $_am_tools
-do
- case $_am_tool in
- gnutar)
- for _am_tar in tar gnutar gtar;
- do
- AM_RUN_LOG([$_am_tar --version]) && break
- done
- am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"'
- am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"'
- am__untar="$_am_tar -xf -"
- ;;
- plaintar)
- # Must skip GNU tar: if it does not support --format= it doesn't create
-    # a ustar tarball either.
- (tar --version) >/dev/null 2>&1 && continue
- am__tar='tar chf - "$$tardir"'
- am__tar_='tar chf - "$tardir"'
- am__untar='tar xf -'
- ;;
- pax)
- am__tar='pax -L -x $1 -w "$$tardir"'
- am__tar_='pax -L -x $1 -w "$tardir"'
- am__untar='pax -r'
- ;;
- cpio)
- am__tar='find "$$tardir" -print | cpio -o -H $1 -L'
- am__tar_='find "$tardir" -print | cpio -o -H $1 -L'
- am__untar='cpio -i -H $1 -d'
- ;;
- none)
- am__tar=false
- am__tar_=false
- am__untar=false
- ;;
- esac
-
- # If the value was cached, stop now. We just wanted to have am__tar
- # and am__untar set.
- test -n "${am_cv_prog_tar_$1}" && break
-
- # tar/untar a dummy directory, and stop if the command works
- rm -rf conftest.dir
- mkdir conftest.dir
- echo GrepMe > conftest.dir/file
- AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
- rm -rf conftest.dir
- if test -s conftest.tar; then
- AM_RUN_LOG([$am__untar <conftest.tar])
- grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
- fi
-done
-rm -rf conftest.dir
-
-AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
-AC_MSG_RESULT([$am_cv_prog_tar_$1])])
-AC_SUBST([am__tar])
-AC_SUBST([am__untar])
-]) # _AM_PROG_TAR
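
The substituted variables are used as sketched in the header comment above; the format itself is requested through an AM_INIT_AUTOMAKE option (the package and file names below are placeholders):

    # configure.ac
    AM_INIT_AUTOMAKE([tar-ustar])

    # in a Makefile rule
    tardir=mypkg-1.0 && $(am__tar) > mypkg-1.0.tar
    $(am__untar) < mypkg-1.0.tar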
-
-m4_include([m4/boost.m4])
-m4_include([m4/libtool.m4])
-m4_include([m4/ltoptions.m4])
-m4_include([m4/ltsugar.m4])
-m4_include([m4/ltversion.m4])
-m4_include([m4/lt~obsolete.m4])
diff --git a/scripts/training/compact-rule-table/config.guess b/scripts/training/compact-rule-table/config.guess
deleted file mode 100755
index 666c5ad92..000000000
--- a/scripts/training/compact-rule-table/config.guess
+++ /dev/null
@@ -1,1511 +0,0 @@
-#! /bin/sh
-# Attempt to guess a canonical system name.
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
-# Free Software Foundation, Inc.
-
-timestamp='2009-11-20'
-
-# This file is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
-# 02110-1301, USA.
-#
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-
-# Originally written by Per Bothner. Please send patches (context
-# diff format) to <config-patches@gnu.org> and include a ChangeLog
-# entry.
-#
-# This script attempts to guess a canonical system name similar to
-# config.sub. If it succeeds, it prints the system name on stdout, and
-# exits with 0. Otherwise, it exits with 1.
-#
-# You can get the latest version of this script from:
-# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
-
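As an illustration only: on a SPARC Solaris 10 host, the sun4*:SunOS:5.* branch shown further down would make a bare invocation print a triplet such as:

    $ sh ./config.guess
    sparc-sun-solaris2.10
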
-me=`echo "$0" | sed -e 's,.*/,,'`
-
-usage="\
-Usage: $0 [OPTION]
-
-Output the configuration name of the system \`$me' is run on.
-
-Operation modes:
- -h, --help print this help, then exit
- -t, --time-stamp print date of last modification, then exit
- -v, --version print version number, then exit
-
-Report bugs and patches to <config-patches@gnu.org>."
-
-version="\
-GNU config.guess ($timestamp)
-
-Originally written by Per Bothner.
-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
-2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
-
-This is free software; see the source for copying conditions. There is NO
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
-
-help="
-Try \`$me --help' for more information."
-
-# Parse command line
-while test $# -gt 0 ; do
- case $1 in
- --time-stamp | --time* | -t )
- echo "$timestamp" ; exit ;;
- --version | -v )
- echo "$version" ; exit ;;
- --help | --h* | -h )
- echo "$usage"; exit ;;
- -- ) # Stop option processing
- shift; break ;;
- - ) # Use stdin as input.
- break ;;
- -* )
- echo "$me: invalid option $1$help" >&2
- exit 1 ;;
- * )
- break ;;
- esac
-done
-
-if test $# != 0; then
- echo "$me: too many arguments$help" >&2
- exit 1
-fi
-
-trap 'exit 1' 1 2 15
-
-# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
-# compiler to aid in system detection is discouraged as it requires
-# temporary files to be created and, as you can see below, it is a
-# headache to deal with in a portable fashion.
-
-# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
-# use `HOST_CC' if defined, but it is deprecated.
-
-# Portable tmp directory creation inspired by the Autoconf team.
-
-set_cc_for_build='
-trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
-trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
-: ${TMPDIR=/tmp} ;
- { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
- { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
- { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
- { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
-dummy=$tmp/dummy ;
-tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
-case $CC_FOR_BUILD,$HOST_CC,$CC in
- ,,) echo "int x;" > $dummy.c ;
- for c in cc gcc c89 c99 ; do
- if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
- CC_FOR_BUILD="$c"; break ;
- fi ;
- done ;
- if test x"$CC_FOR_BUILD" = x ; then
- CC_FOR_BUILD=no_compiler_found ;
- fi
- ;;
- ,,*) CC_FOR_BUILD=$CC ;;
- ,*,*) CC_FOR_BUILD=$HOST_CC ;;
-esac ; set_cc_for_build= ;'
-
-# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
-# (ghazi@noc.rutgers.edu 1994-08-24)
-if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
- PATH=$PATH:/.attbin ; export PATH
-fi
-
-UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
-UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
-UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
-UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
-
-case "${UNAME_MACHINE}" in
- i?86)
- test -z "$VENDOR" && VENDOR=pc
- ;;
- *)
- test -z "$VENDOR" && VENDOR=unknown
- ;;
-esac
-test -f /etc/SuSE-release -o -f /.buildenv && VENDOR=suse
-
-# Note: order is significant - the case branches are not exclusive.
-
-case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
- *:NetBSD:*:*)
- # NetBSD (nbsd) targets should (where applicable) match one or
-	# more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
- # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
- # switched to ELF, *-*-netbsd* would select the old
- # object file format. This provides both forward
- # compatibility and a consistent mechanism for selecting the
- # object file format.
- #
- # Note: NetBSD doesn't particularly care about the vendor
- # portion of the name. We always set it to "unknown".
- sysctl="sysctl -n hw.machine_arch"
- UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
- /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
- case "${UNAME_MACHINE_ARCH}" in
- armeb) machine=armeb-unknown ;;
- arm*) machine=arm-unknown ;;
- sh3el) machine=shl-unknown ;;
- sh3eb) machine=sh-unknown ;;
- sh5el) machine=sh5le-unknown ;;
- *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
- esac
- # The Operating System including object format, if it has switched
- # to ELF recently, or will in the future.
- case "${UNAME_MACHINE_ARCH}" in
- arm*|i386|m68k|ns32k|sh3*|sparc|vax)
- eval $set_cc_for_build
- if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
- | grep -q __ELF__
- then
- # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
- # Return netbsd for either. FIX?
- os=netbsd
- else
- os=netbsdelf
- fi
- ;;
- *)
- os=netbsd
- ;;
- esac
- # The OS release
- # Debian GNU/NetBSD machines have a different userland, and
- # thus, need a distinct triplet. However, they do not need
- # kernel version information, so it can be replaced with a
- # suitable tag, in the style of linux-gnu.
- case "${UNAME_VERSION}" in
- Debian*)
- release='-gnu'
- ;;
- *)
- release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
- ;;
- esac
- # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
- # contains redundant information, the shorter form:
- # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
- echo "${machine}-${os}${release}"
- exit ;;
- *:OpenBSD:*:*)
- UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
- echo ${UNAME_MACHINE_ARCH}-${VENDOR}-openbsd${UNAME_RELEASE}
- exit ;;
- *:ekkoBSD:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-ekkobsd${UNAME_RELEASE}
- exit ;;
- *:SolidBSD:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-solidbsd${UNAME_RELEASE}
- exit ;;
- macppc:MirBSD:*:*)
- echo powerpc-${VENDOR}-mirbsd${UNAME_RELEASE}
- exit ;;
- *:MirBSD:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-mirbsd${UNAME_RELEASE}
- exit ;;
- alpha:OSF1:*:*)
- case $UNAME_RELEASE in
- *4.0)
- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
- ;;
- *5.*)
- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
- ;;
- esac
- # According to Compaq, /usr/sbin/psrinfo has been available on
- # OSF/1 and Tru64 systems produced since 1995. I hope that
- # covers most systems running today. This code pipes the CPU
- # types through head -n 1, so we only detect the type of CPU 0.
- ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
- case "$ALPHA_CPU_TYPE" in
- "EV4 (21064)")
- UNAME_MACHINE="alpha" ;;
- "EV4.5 (21064)")
- UNAME_MACHINE="alpha" ;;
- "LCA4 (21066/21068)")
- UNAME_MACHINE="alpha" ;;
- "EV5 (21164)")
- UNAME_MACHINE="alphaev5" ;;
- "EV5.6 (21164A)")
- UNAME_MACHINE="alphaev56" ;;
- "EV5.6 (21164PC)")
- UNAME_MACHINE="alphapca56" ;;
- "EV5.7 (21164PC)")
- UNAME_MACHINE="alphapca57" ;;
- "EV6 (21264)")
- UNAME_MACHINE="alphaev6" ;;
- "EV6.7 (21264A)")
- UNAME_MACHINE="alphaev67" ;;
- "EV6.8CB (21264C)")
- UNAME_MACHINE="alphaev68" ;;
- "EV6.8AL (21264B)")
- UNAME_MACHINE="alphaev68" ;;
- "EV6.8CX (21264D)")
- UNAME_MACHINE="alphaev68" ;;
- "EV6.9A (21264/EV69A)")
- UNAME_MACHINE="alphaev69" ;;
- "EV7 (21364)")
- UNAME_MACHINE="alphaev7" ;;
- "EV7.9 (21364A)")
- UNAME_MACHINE="alphaev79" ;;
- esac
- # A Pn.n version is a patched version.
- # A Vn.n version is a released version.
- # A Tn.n version is a released field test version.
- # A Xn.n version is an unreleased experimental baselevel.
- # 1.2 uses "1.2" for uname -r.
- echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
- exit ;;
- Alpha\ *:Windows_NT*:*)
- # How do we know it's Interix rather than the generic POSIX subsystem?
- # Should we change UNAME_MACHINE based on the output of uname instead
- # of the specific Alpha model?
- echo alpha-pc-interix
- exit ;;
- 21064:Windows_NT:50:3)
- echo alpha-dec-winnt3.5
- exit ;;
- Amiga*:UNIX_System_V:4.0:*)
- echo m68k-${VENDOR}-sysv4
- exit ;;
- *:[Aa]miga[Oo][Ss]:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-amigaos
- exit ;;
- *:[Mm]orph[Oo][Ss]:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-morphos
- exit ;;
- *:OS/390:*:*)
- echo i370-ibm-openedition
- exit ;;
- *:z/VM:*:*)
- echo s390-ibm-zvmoe
- exit ;;
- *:OS400:*:*)
- echo powerpc-ibm-os400
- exit ;;
- arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
- echo arm-acorn-riscix${UNAME_RELEASE}
- exit ;;
- arm:riscos:*:*|arm:RISCOS:*:*)
- echo arm-${VENDOR}-riscos
- exit ;;
- SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
- echo hppa1.1-hitachi-hiuxmpp
- exit ;;
- Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
- # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
- if test "`(/bin/universe) 2>/dev/null`" = att ; then
- echo pyramid-pyramid-sysv3
- else
- echo pyramid-pyramid-bsd
- fi
- exit ;;
- NILE*:*:*:dcosx)
- echo pyramid-pyramid-svr4
- exit ;;
- DRS?6000:unix:4.0:6*)
- echo sparc-icl-nx6
- exit ;;
- DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
- case `/usr/bin/uname -p` in
- sparc) echo sparc-icl-nx7; exit ;;
- esac ;;
- s390x:SunOS:*:*)
- echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4H:SunOS:5.*:*)
- echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
- echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
- echo i386-pc-auroraux${UNAME_RELEASE}
- exit ;;
- i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
- eval $set_cc_for_build
- SUN_ARCH="i386"
- # If there is a compiler, see if it is configured for 64-bit objects.
- # Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
- # This test works for both compilers.
- if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
- if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
- (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
- grep IS_64BIT_ARCH >/dev/null
- then
- SUN_ARCH="x86_64"
- fi
- fi
- echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4*:SunOS:6*:*)
- # According to config.sub, this is the proper way to canonicalize
- # SunOS6. Hard to guess exactly what SunOS6 will be like, but
- # it's likely to be more like Solaris than SunOS4.
- echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- sun4*:SunOS:*:*)
- case "`/usr/bin/arch -k`" in
- Series*|S4*)
- UNAME_RELEASE=`uname -v`
- ;;
- esac
- # Japanese Language versions have a version number like `4.1.3-JL'.
- echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
- exit ;;
- sun3*:SunOS:*:*)
- echo m68k-sun-sunos${UNAME_RELEASE}
- exit ;;
- sun*:*:4.2BSD:*)
- UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
- test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
- case "`/bin/arch`" in
- sun3)
- echo m68k-sun-sunos${UNAME_RELEASE}
- ;;
- sun4)
- echo sparc-sun-sunos${UNAME_RELEASE}
- ;;
- esac
- exit ;;
- aushp:SunOS:*:*)
- echo sparc-auspex-sunos${UNAME_RELEASE}
- exit ;;
- # The situation for MiNT is a little confusing. The machine name
- # can be virtually everything (everything which is not
- # "atarist" or "atariste" at least should have a processor
-	# > m68000). The system name ranges from "MiNT" through "FreeMiNT"
- # to the lowercase version "mint" (or "freemint"). Finally
- # the system name "TOS" denotes a system which is actually not
-	# MiNT. But MiNT is backward compatible with TOS, so this should
- # be no problem.
- atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
- echo m68k-atari-mint${UNAME_RELEASE}
- exit ;;
- atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
- echo m68k-atari-mint${UNAME_RELEASE}
- exit ;;
- *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
- echo m68k-atari-mint${UNAME_RELEASE}
- exit ;;
- milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
- echo m68k-milan-mint${UNAME_RELEASE}
- exit ;;
- hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
- echo m68k-hades-mint${UNAME_RELEASE}
- exit ;;
- *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
- echo m68k-${VENDOR}-mint${UNAME_RELEASE}
- exit ;;
- m68k:machten:*:*)
- echo m68k-apple-machten${UNAME_RELEASE}
- exit ;;
- powerpc:machten:*:*)
- echo powerpc-apple-machten${UNAME_RELEASE}
- exit ;;
- RISC*:Mach:*:*)
- echo mips-dec-mach_bsd4.3
- exit ;;
- RISC*:ULTRIX:*:*)
- echo mips-dec-ultrix${UNAME_RELEASE}
- exit ;;
- VAX*:ULTRIX*:*:*)
- echo vax-dec-ultrix${UNAME_RELEASE}
- exit ;;
- 2020:CLIX:*:* | 2430:CLIX:*:*)
- echo clipper-intergraph-clix${UNAME_RELEASE}
- exit ;;
- mips:*:*:UMIPS | mips:*:*:RISCos)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
-#ifdef __cplusplus
-#include <stdio.h> /* for printf() prototype */
- int main (int argc, char *argv[]) {
-#else
- int main (argc, argv) int argc; char *argv[]; {
-#endif
- #if defined (host_mips) && defined (MIPSEB)
- #if defined (SYSTYPE_SYSV)
- printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
- #endif
- #if defined (SYSTYPE_SVR4)
- printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
- #endif
- #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
- printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
- #endif
- #endif
- exit (-1);
- }
-EOF
- $CC_FOR_BUILD -o $dummy $dummy.c &&
- dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
- SYSTEM_NAME=`$dummy $dummyarg` &&
- { echo "$SYSTEM_NAME"; exit; }
- echo mips-mips-riscos${UNAME_RELEASE}
- exit ;;
- Motorola:PowerMAX_OS:*:*)
- echo powerpc-motorola-powermax
- exit ;;
- Motorola:*:4.3:PL8-*)
- echo powerpc-harris-powermax
- exit ;;
- Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
- echo powerpc-harris-powermax
- exit ;;
- Night_Hawk:Power_UNIX:*:*)
- echo powerpc-harris-powerunix
- exit ;;
- m88k:CX/UX:7*:*)
- echo m88k-harris-cxux7
- exit ;;
- m88k:*:4*:R4*)
- echo m88k-motorola-sysv4
- exit ;;
- m88k:*:3*:R3*)
- echo m88k-motorola-sysv3
- exit ;;
- AViiON:dgux:*:*)
- # DG/UX returns AViiON for all architectures
- UNAME_PROCESSOR=`/usr/bin/uname -p`
- if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
- then
- if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
- [ ${TARGET_BINARY_INTERFACE}x = x ]
- then
- echo m88k-dg-dgux${UNAME_RELEASE}
- else
- echo m88k-dg-dguxbcs${UNAME_RELEASE}
- fi
- else
- echo i586-dg-dgux${UNAME_RELEASE}
- fi
- exit ;;
- M88*:DolphinOS:*:*) # DolphinOS (SVR3)
- echo m88k-dolphin-sysv3
- exit ;;
- M88*:*:R3*:*)
- # Delta 88k system running SVR3
- echo m88k-motorola-sysv3
- exit ;;
- XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
- echo m88k-tektronix-sysv3
- exit ;;
- Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
- echo m68k-tektronix-bsd
- exit ;;
- *:IRIX*:*:*)
- echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
- exit ;;
- ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
- echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id
- exit ;; # Note that: echo "'`uname -s`'" gives 'AIX '
- i*86:AIX:*:*)
- echo i386-ibm-aix
- exit ;;
- ia64:AIX:*:*)
- if [ -x /usr/bin/oslevel ] ; then
- IBM_REV=`/usr/bin/oslevel`
- else
- IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
- fi
- echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
- exit ;;
- *:AIX:2:3)
- if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #include <sys/systemcfg.h>
-
- main()
- {
- if (!__power_pc())
- exit(1);
- puts("powerpc-ibm-aix3.2.5");
- exit(0);
- }
-EOF
- if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
- then
- echo "$SYSTEM_NAME"
- else
- echo rs6000-ibm-aix3.2.5
- fi
- elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
- echo rs6000-ibm-aix3.2.4
- else
- echo rs6000-ibm-aix3.2
- fi
- exit ;;
- *:AIX:*:[456])
- IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
- if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
- IBM_ARCH=rs6000
- else
- IBM_ARCH=powerpc
- fi
- if [ -x /usr/bin/oslevel ] ; then
- IBM_REV=`/usr/bin/oslevel`
- else
- IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
- fi
- echo ${IBM_ARCH}-ibm-aix${IBM_REV}
- exit ;;
- *:AIX:*:*)
- echo rs6000-ibm-aix
- exit ;;
- ibmrt:4.4BSD:*|romp-ibm:BSD:*)
- echo romp-ibm-bsd4.4
- exit ;;
- ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
- echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to
- exit ;; # report: romp-ibm BSD 4.3
- *:BOSX:*:*)
- echo rs6000-bull-bosx
- exit ;;
- DPX/2?00:B.O.S.:*:*)
- echo m68k-bull-sysv3
- exit ;;
- 9000/[34]??:4.3bsd:1.*:*)
- echo m68k-hp-bsd
- exit ;;
- hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
- echo m68k-hp-bsd4.4
- exit ;;
- 9000/[34678]??:HP-UX:*:*)
- HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
- case "${UNAME_MACHINE}" in
- 9000/31? ) HP_ARCH=m68000 ;;
- 9000/[34]?? ) HP_ARCH=m68k ;;
- 9000/[678][0-9][0-9])
- if [ -x /usr/bin/getconf ]; then
- sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
- sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
- case "${sc_cpu_version}" in
- 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
- 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
- 532) # CPU_PA_RISC2_0
- case "${sc_kernel_bits}" in
- 32) HP_ARCH="hppa2.0n" ;;
- 64) HP_ARCH="hppa2.0w" ;;
- '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20
- esac ;;
- esac
- fi
- if [ "${HP_ARCH}" = "" ]; then
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
-
- #define _HPUX_SOURCE
- #include <stdlib.h>
- #include <unistd.h>
-
- int main ()
- {
- #if defined(_SC_KERNEL_BITS)
- long bits = sysconf(_SC_KERNEL_BITS);
- #endif
- long cpu = sysconf (_SC_CPU_VERSION);
-
- switch (cpu)
- {
- case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
- case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
- case CPU_PA_RISC2_0:
- #if defined(_SC_KERNEL_BITS)
- switch (bits)
- {
- case 64: puts ("hppa2.0w"); break;
- case 32: puts ("hppa2.0n"); break;
- default: puts ("hppa2.0"); break;
- } break;
- #else /* !defined(_SC_KERNEL_BITS) */
- puts ("hppa2.0"); break;
- #endif
- default: puts ("hppa1.0"); break;
- }
- exit (0);
- }
-EOF
- (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
- test -z "$HP_ARCH" && HP_ARCH=hppa
- fi ;;
- esac
- if [ ${HP_ARCH} = "hppa2.0w" ]
- then
- eval $set_cc_for_build
-
- # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
- # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler
- # generating 64-bit code. GNU and HP use different nomenclature:
- #
- # $ CC_FOR_BUILD=cc ./config.guess
- # => hppa2.0w-hp-hpux11.23
- # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
- # => hppa64-hp-hpux11.23
-
- if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
- grep -q __LP64__
- then
- HP_ARCH="hppa2.0w"
- else
- HP_ARCH="hppa64"
- fi
- fi
- echo ${HP_ARCH}-hp-hpux${HPUX_REV}
- exit ;;
- ia64:HP-UX:*:*)
- HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
- echo ia64-hp-hpux${HPUX_REV}
- exit ;;
- 3050*:HI-UX:*:*)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #include <unistd.h>
- int
- main ()
- {
- long cpu = sysconf (_SC_CPU_VERSION);
- /* The order matters, because CPU_IS_HP_MC68K erroneously returns
- true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct
- results, however. */
- if (CPU_IS_PA_RISC (cpu))
- {
- switch (cpu)
- {
- case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
- case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
- case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
- default: puts ("hppa-hitachi-hiuxwe2"); break;
- }
- }
- else if (CPU_IS_HP_MC68K (cpu))
- puts ("m68k-hitachi-hiuxwe2");
- else puts ("unknown-hitachi-hiuxwe2");
- exit (0);
- }
-EOF
- $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
- { echo "$SYSTEM_NAME"; exit; }
- echo unknown-hitachi-hiuxwe2
- exit ;;
- 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
- echo hppa1.1-hp-bsd
- exit ;;
- 9000/8??:4.3bsd:*:*)
- echo hppa1.0-hp-bsd
- exit ;;
- *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
- echo hppa1.0-hp-mpeix
- exit ;;
- hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
- echo hppa1.1-hp-osf
- exit ;;
- hp8??:OSF1:*:*)
- echo hppa1.0-hp-osf
- exit ;;
- i*86:OSF1:*:*)
- if [ -x /usr/sbin/sysversion ] ; then
- echo ${UNAME_MACHINE}-${VENDOR}-osf1mk
- else
- echo ${UNAME_MACHINE}-${VENDOR}-osf1
- fi
- exit ;;
- parisc*:Lites*:*:*)
- echo hppa1.1-hp-lites
- exit ;;
- C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
- echo c1-convex-bsd
- exit ;;
- C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
- if getsysinfo -f scalar_acc
- then echo c32-convex-bsd
- else echo c2-convex-bsd
- fi
- exit ;;
- C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
- echo c34-convex-bsd
- exit ;;
- C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
- echo c38-convex-bsd
- exit ;;
- C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
- echo c4-convex-bsd
- exit ;;
- CRAY*Y-MP:*:*:*)
- echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*[A-Z]90:*:*:*)
- echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
- | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
- -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
- -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*TS:*:*:*)
- echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*T3E:*:*:*)
- echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- CRAY*SV1:*:*:*)
- echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- *:UNICOS/mp:*:*)
- echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
- exit ;;
- F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
- FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
- FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
- echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
- exit ;;
- 5000:UNIX_System_V:4.*:*)
- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
- FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
- echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
- exit ;;
- i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
- echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
- exit ;;
- sparc*:BSD/OS:*:*)
- echo sparc-${VENDOR}-bsdi${UNAME_RELEASE}
- exit ;;
- *:BSD/OS:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-bsdi${UNAME_RELEASE}
- exit ;;
- *:FreeBSD:*:*)
- case ${UNAME_MACHINE} in
- pc98)
- echo i386-${VENDOR}-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
- amd64)
- echo x86_64-${VENDOR}-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
- *)
- echo ${UNAME_MACHINE}-${VENDOR}-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
- esac
- exit ;;
- i*:CYGWIN*:*)
- echo ${UNAME_MACHINE}-pc-cygwin
- exit ;;
- *:MINGW*:*)
- echo ${UNAME_MACHINE}-pc-mingw32
- exit ;;
- i*:windows32*:*)
- # uname -m includes "-pc" on this system.
- echo ${UNAME_MACHINE}-mingw32
- exit ;;
- i*:PW*:*)
- echo ${UNAME_MACHINE}-pc-pw32
- exit ;;
- *:Interix*:*)
- case ${UNAME_MACHINE} in
- x86)
- echo i586-pc-interix${UNAME_RELEASE}
- exit ;;
- authenticamd | genuineintel | EM64T)
- echo x86_64-${VENDOR}-interix${UNAME_RELEASE}
- exit ;;
- IA64)
- echo ia64-${VENDOR}-interix${UNAME_RELEASE}
- exit ;;
- esac ;;
- [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
- echo i${UNAME_MACHINE}-pc-mks
- exit ;;
- 8664:Windows_NT:*)
- echo x86_64-pc-mks
- exit ;;
- i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
- # How do we know it's Interix rather than the generic POSIX subsystem?
- # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
- # UNAME_MACHINE based on the output of uname instead of i386?
- echo i586-pc-interix
- exit ;;
- i*:UWIN*:*)
- echo ${UNAME_MACHINE}-pc-uwin
- exit ;;
- amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
- echo x86_64-${VENDOR}-cygwin
- exit ;;
- p*:CYGWIN*:*)
- echo powerpcle-${VENDOR}-cygwin
- exit ;;
- prep*:SunOS:5.*:*)
- echo powerpcle-${VENDOR}-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
- exit ;;
- *:GNU:*:*)
- # the GNU system
- echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-${VENDOR}-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
- exit ;;
- *:GNU/*:*:*)
- # other systems with GNU libc and userland
- echo ${UNAME_MACHINE}-${VENDOR}-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
- exit ;;
- i*86:Minix:*:*)
- echo ${UNAME_MACHINE}-pc-minix
- exit ;;
- alpha:Linux:*:*)
- case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
- EV5) UNAME_MACHINE=alphaev5 ;;
- EV56) UNAME_MACHINE=alphaev56 ;;
- PCA56) UNAME_MACHINE=alphapca56 ;;
- PCA57) UNAME_MACHINE=alphapca56 ;;
- EV6) UNAME_MACHINE=alphaev6 ;;
- EV67) UNAME_MACHINE=alphaev67 ;;
- EV68*) UNAME_MACHINE=alphaev68 ;;
- esac
- objdump --private-headers /bin/sh | grep -q ld.so.1
- if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu${LIBC}
- exit ;;
- arm*:Linux:*:*)
- eval $set_cc_for_build
- if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
- | grep -q __ARM_EABI__
- then
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- else
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnueabi
- fi
- exit ;;
- avr32*:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- cris:Linux:*:*)
- echo cris-axis-linux-gnu
- exit ;;
- crisv32:Linux:*:*)
- echo crisv32-axis-linux-gnu
- exit ;;
- frv:Linux:*:*)
- echo frv-${VENDOR}-linux-gnu
- exit ;;
- i*86:Linux:*:*)
- LIBC=gnu
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #ifdef __dietlibc__
- LIBC=dietlibc
- #endif
-EOF
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
- echo "${UNAME_MACHINE}-${VENDOR}-linux-${LIBC}"
- exit ;;
- ia64:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- m32r*:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- m68*:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- mips:Linux:*:* | mips64:Linux:*:*)
- eval $set_cc_for_build
- sed 's/^ //' << EOF >$dummy.c
- #undef CPU
- #undef ${UNAME_MACHINE}
- #undef ${UNAME_MACHINE}el
- #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
- CPU=${UNAME_MACHINE}el
- #else
- #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
- CPU=${UNAME_MACHINE}
- #else
- CPU=
- #endif
- #endif
-EOF
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
- test x"${CPU}" != x && { echo "${CPU}-${VENDOR}-linux-gnu"; exit; }
- ;;
- or32:Linux:*:*)
- echo or32-${VENDOR}-linux-gnu
- exit ;;
- padre:Linux:*:*)
- echo sparc-${VENDOR}-linux-gnu
- exit ;;
- parisc64:Linux:*:* | hppa64:Linux:*:*)
- echo hppa64-${VENDOR}-linux-gnu
- exit ;;
- parisc:Linux:*:* | hppa:Linux:*:*)
- # Look for CPU level
- case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
- PA7*) echo hppa1.1-${VENDOR}-linux-gnu ;;
- PA8*) echo hppa2.0-${VENDOR}-linux-gnu ;;
- *) echo hppa-${VENDOR}-linux-gnu ;;
- esac
- exit ;;
- ppc64:Linux:*:*)
- echo powerpc64-${VENDOR}-linux-gnu
- exit ;;
- ppc:Linux:*:*)
- echo powerpc-${VENDOR}-linux-gnu
- exit ;;
- s390:Linux:*:* | s390x:Linux:*:*)
- echo ${UNAME_MACHINE}-ibm-linux
- exit ;;
- sh64*:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- sh*:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- sparc:Linux:*:* | sparc64:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- vax:Linux:*:*)
- echo ${UNAME_MACHINE}-dec-linux-gnu
- exit ;;
- x86_64:Linux:*:*)
- echo x86_64-${VENDOR}-linux-gnu
- exit ;;
- xtensa*:Linux:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-linux-gnu
- exit ;;
- i*86:DYNIX/ptx:4*:*)
- # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
- # earlier versions are messed up and put the nodename in both
- # sysname and nodename.
- echo i386-sequent-sysv4
- exit ;;
- i*86:UNIX_SV:4.2MP:2.*)
- # Unixware is an offshoot of SVR4, but it has its own version
- # number series starting with 2...
- # I am not positive that other SVR4 systems won't match this,
- # I just have to hope. -- rms.
- # Use sysv4.2uw... so that sysv4* matches it.
- echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
- exit ;;
- i*86:OS/2:*:*)
- # If we were able to find `uname', then EMX Unix compatibility
- # is probably installed.
- echo ${UNAME_MACHINE}-pc-os2-emx
- exit ;;
- i*86:XTS-300:*:STOP)
- echo ${UNAME_MACHINE}-${VENDOR}-stop
- exit ;;
- i*86:atheos:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-atheos
- exit ;;
- i*86:syllable:*:*)
- echo ${UNAME_MACHINE}-pc-syllable
- exit ;;
- i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
- echo i386-${VENDOR}-lynxos${UNAME_RELEASE}
- exit ;;
- i*86:*DOS:*:*)
- echo ${UNAME_MACHINE}-pc-msdosdjgpp
- exit ;;
- i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
- UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
- if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
- echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
- else
- echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
- fi
- exit ;;
- i*86:*:5:[678]*)
- # UnixWare 7.x, OpenUNIX and OpenServer 6.
- case `/bin/uname -X | grep "^Machine"` in
- *486*) UNAME_MACHINE=i486 ;;
- *Pentium) UNAME_MACHINE=i586 ;;
- *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
- esac
- echo ${UNAME_MACHINE}-${VENDOR}-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
- exit ;;
- i*86:*:3.2:*)
- if test -f /usr/options/cb.name; then
- UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
- echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
- elif /bin/uname -X 2>/dev/null >/dev/null ; then
- UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
- (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
- (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
- && UNAME_MACHINE=i586
- (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
- && UNAME_MACHINE=i686
- (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
- && UNAME_MACHINE=i686
- echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
- else
- echo ${UNAME_MACHINE}-pc-sysv32
- fi
- exit ;;
- pc:*:*:*)
- # Left here for compatibility:
- # uname -m prints for DJGPP always 'pc', but it prints nothing about
- # the processor, so we play safe by assuming i586.
- # Note: whatever this is, it MUST be the same as what config.sub
- # prints for the "djgpp" host, or else GDB configury will decide that
- # this is a cross-build.
- echo i586-pc-msdosdjgpp
- exit ;;
- Intel:Mach:3*:*)
- echo i386-pc-mach3
- exit ;;
- paragon:*:*:*)
- echo i860-intel-osf1
- exit ;;
- i860:*:4.*:*) # i860-SVR4
- if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
- echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
- else # Add other i860-SVR4 vendors below as they are discovered.
- echo i860-${VENDOR}-sysv${UNAME_RELEASE} # Unknown i860-SVR4
- fi
- exit ;;
- mini*:CTIX:SYS*5:*)
- # "miniframe"
- echo m68010-convergent-sysv
- exit ;;
- mc68k:UNIX:SYSTEM5:3.51m)
- echo m68k-convergent-sysv
- exit ;;
- M680?0:D-NIX:5.3:*)
- echo m68k-diab-dnix
- exit ;;
- M68*:*:R3V[5678]*:*)
- test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
- 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
- OS_REL=''
- test -r /etc/.relid \
- && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
- /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
- && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
- /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
- && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
- 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
- /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
- && { echo i486-ncr-sysv4; exit; } ;;
- NCR*:*:4.2:* | MPRAS*:*:4.2:*)
- OS_REL='.3'
- test -r /etc/.relid \
- && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
- /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
- && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
- /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
- && { echo i586-ncr-sysv4.3${OS_REL}; exit; }
- /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
- && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
- m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
- echo m68k-${VENDOR}-lynxos${UNAME_RELEASE}
- exit ;;
- mc68030:UNIX_System_V:4.*:*)
- echo m68k-atari-sysv4
- exit ;;
- TSUNAMI:LynxOS:2.*:*)
- echo sparc-${VENDOR}-lynxos${UNAME_RELEASE}
- exit ;;
- rs6000:LynxOS:2.*:*)
- echo rs6000-${VENDOR}-lynxos${UNAME_RELEASE}
- exit ;;
- PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
- echo powerpc-${VENDOR}-lynxos${UNAME_RELEASE}
- exit ;;
- SM[BE]S:UNIX_SV:*:*)
- echo mips-dde-sysv${UNAME_RELEASE}
- exit ;;
- RM*:ReliantUNIX-*:*:*)
- echo mips-sni-sysv4
- exit ;;
- RM*:SINIX-*:*:*)
- echo mips-sni-sysv4
- exit ;;
- *:SINIX-*:*:*)
- if uname -p 2>/dev/null >/dev/null ; then
- UNAME_MACHINE=`(uname -p) 2>/dev/null`
- echo ${UNAME_MACHINE}-sni-sysv4
- else
- echo ns32k-sni-sysv
- fi
- exit ;;
- PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
- # says <Richard.M.Bartel@ccMail.Census.GOV>
- echo i586-unisys-sysv4
- exit ;;
- *:UNIX_System_V:4*:FTX*)
- # From Gerald Hewes <hewes@openmarket.com>.
- # How about differentiating between stratus architectures? -djm
- echo hppa1.1-stratus-sysv4
- exit ;;
- *:*:*:FTX*)
- # From seanf@swdc.stratus.com.
- echo i860-stratus-sysv4
- exit ;;
- i*86:VOS:*:*)
- # From Paul.Green@stratus.com.
- echo ${UNAME_MACHINE}-stratus-vos
- exit ;;
- *:VOS:*:*)
- # From Paul.Green@stratus.com.
- echo hppa1.1-stratus-vos
- exit ;;
- mc68*:A/UX:*:*)
- echo m68k-apple-aux${UNAME_RELEASE}
- exit ;;
- news*:NEWS-OS:6*:*)
- echo mips-sony-newsos6
- exit ;;
- R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
- if [ -d /usr/nec ]; then
- echo mips-nec-sysv${UNAME_RELEASE}
- else
- echo mips-${VENDOR}-sysv${UNAME_RELEASE}
- fi
- exit ;;
- BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only.
- echo powerpc-be-beos
- exit ;;
- BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only.
- echo powerpc-apple-beos
- exit ;;
- BePC:BeOS:*:*) # BeOS running on Intel PC compatible.
- echo i586-pc-beos
- exit ;;
- BePC:Haiku:*:*) # Haiku running on Intel PC compatible.
- echo i586-pc-haiku
- exit ;;
- SX-4:SUPER-UX:*:*)
- echo sx4-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-5:SUPER-UX:*:*)
- echo sx5-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-6:SUPER-UX:*:*)
- echo sx6-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-7:SUPER-UX:*:*)
- echo sx7-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-8:SUPER-UX:*:*)
- echo sx8-nec-superux${UNAME_RELEASE}
- exit ;;
- SX-8R:SUPER-UX:*:*)
- echo sx8r-nec-superux${UNAME_RELEASE}
- exit ;;
- Power*:Rhapsody:*:*)
- echo powerpc-apple-rhapsody${UNAME_RELEASE}
- exit ;;
- *:Rhapsody:*:*)
- echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
- exit ;;
- *:Darwin:*:*)
- UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
- case $UNAME_PROCESSOR in
- i386)
- eval $set_cc_for_build
- if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
- if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
- (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
- grep IS_64BIT_ARCH >/dev/null
- then
- UNAME_PROCESSOR="x86_64"
- fi
- fi ;;
- unknown) UNAME_PROCESSOR=powerpc ;;
- esac
- echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
- exit ;;
- *:procnto*:*:* | *:QNX:[0123456789]*:*)
- UNAME_PROCESSOR=`uname -p`
- if test "$UNAME_PROCESSOR" = "x86"; then
- UNAME_PROCESSOR=i386
- UNAME_MACHINE=pc
- fi
- echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
- exit ;;
- *:QNX:*:4*)
- echo i386-pc-qnx
- exit ;;
- NSE-?:NONSTOP_KERNEL:*:*)
- echo nse-tandem-nsk${UNAME_RELEASE}
- exit ;;
- NSR-?:NONSTOP_KERNEL:*:*)
- echo nsr-tandem-nsk${UNAME_RELEASE}
- exit ;;
- *:NonStop-UX:*:*)
- echo mips-compaq-nonstopux
- exit ;;
- BS2000:POSIX*:*:*)
- echo bs2000-siemens-sysv
- exit ;;
- DS/*:UNIX_System_V:*:*)
- echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
- exit ;;
- *:Plan9:*:*)
- # "uname -m" is not consistent, so use $cputype instead. 386
- # is converted to i386 for consistency with other x86
- # operating systems.
- if test "$cputype" = "386"; then
- UNAME_MACHINE=i386
- else
- UNAME_MACHINE="$cputype"
- fi
- echo ${UNAME_MACHINE}-${VENDOR}-plan9
- exit ;;
- *:TOPS-10:*:*)
- echo pdp10-${VENDOR}-tops10
- exit ;;
- *:TENEX:*:*)
- echo pdp10-${VENDOR}-tenex
- exit ;;
- KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
- echo pdp10-dec-tops20
- exit ;;
- XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
- echo pdp10-xkl-tops20
- exit ;;
- *:TOPS-20:*:*)
- echo pdp10-${VENDOR}-tops20
- exit ;;
- *:ITS:*:*)
- echo pdp10-${VENDOR}-its
- exit ;;
- SEI:*:*:SEIUX)
- echo mips-sei-seiux${UNAME_RELEASE}
- exit ;;
- *:DragonFly:*:*)
- echo ${UNAME_MACHINE}-${VENDOR}-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
- exit ;;
- *:*VMS:*:*)
- UNAME_MACHINE=`(uname -p) 2>/dev/null`
- case "${UNAME_MACHINE}" in
- A*) echo alpha-dec-vms ; exit ;;
- I*) echo ia64-dec-vms ; exit ;;
- V*) echo vax-dec-vms ; exit ;;
- esac ;;
- *:XENIX:*:SysV)
- echo i386-pc-xenix
- exit ;;
- i*86:skyos:*:*)
- echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
- exit ;;
- i*86:rdos:*:*)
- echo ${UNAME_MACHINE}-pc-rdos
- exit ;;
- i*86:AROS:*:*)
- echo ${UNAME_MACHINE}-pc-aros
- exit ;;
-esac
-
-#echo '(No uname command or uname output not recognized.)' 1>&2
-#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
-
-eval $set_cc_for_build
-cat >$dummy.c <<EOF
-#ifdef _SEQUENT_
-# include <sys/types.h>
-# include <sys/utsname.h>
-#endif
-main ()
-{
-#if defined (sony)
-#if defined (MIPSEB)
- /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed,
- I don't know.... */
- printf ("mips-sony-bsd\n"); exit (0);
-#else
-#include <sys/param.h>
- printf ("m68k-sony-newsos%s\n",
-#ifdef NEWSOS4
- "4"
-#else
- ""
-#endif
- ); exit (0);
-#endif
-#endif
-
-#if defined (__arm) && defined (__acorn) && defined (__unix)
- printf ("arm-acorn-riscix\n"); exit (0);
-#endif
-
-#if defined (hp300) && !defined (hpux)
- printf ("m68k-hp-bsd\n"); exit (0);
-#endif
-
-#if defined (NeXT)
-#if !defined (__ARCHITECTURE__)
-#define __ARCHITECTURE__ "m68k"
-#endif
- int version;
- version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
- if (version < 4)
- printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
- else
- printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
- exit (0);
-#endif
-
-#if defined (MULTIMAX) || defined (n16)
-#if defined (UMAXV)
- printf ("ns32k-encore-sysv\n"); exit (0);
-#else
-#if defined (CMU)
- printf ("ns32k-encore-mach\n"); exit (0);
-#else
- printf ("ns32k-encore-bsd\n"); exit (0);
-#endif
-#endif
-#endif
-
-#if defined (__386BSD__)
- printf ("i386-pc-bsd\n"); exit (0);
-#endif
-
-#if defined (sequent)
-#if defined (i386)
- printf ("i386-sequent-dynix\n"); exit (0);
-#endif
-#if defined (ns32000)
- printf ("ns32k-sequent-dynix\n"); exit (0);
-#endif
-#endif
-
-#if defined (_SEQUENT_)
- struct utsname un;
-
- uname(&un);
-
- if (strncmp(un.version, "V2", 2) == 0) {
- printf ("i386-sequent-ptx2\n"); exit (0);
- }
- if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
- printf ("i386-sequent-ptx1\n"); exit (0);
- }
- printf ("i386-sequent-ptx\n"); exit (0);
-
-#endif
-
-#if defined (vax)
-# if !defined (ultrix)
-# include <sys/param.h>
-# if defined (BSD)
-# if BSD == 43
- printf ("vax-dec-bsd4.3\n"); exit (0);
-# else
-# if BSD == 199006
- printf ("vax-dec-bsd4.3reno\n"); exit (0);
-# else
- printf ("vax-dec-bsd\n"); exit (0);
-# endif
-# endif
-# else
- printf ("vax-dec-bsd\n"); exit (0);
-# endif
-# else
- printf ("vax-dec-ultrix\n"); exit (0);
-# endif
-#endif
-
-#if defined (alliant) && defined (i860)
- printf ("i860-alliant-bsd\n"); exit (0);
-#endif
-
- exit (1);
-}
-EOF
-
-$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
- { echo "$SYSTEM_NAME"; exit; }
-
-# Apollos put the system type in the environment.
-
-test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
-
-# Convex versions that predate uname can use getsysinfo(1)
-
-if [ -x /usr/convex/getsysinfo ]
-then
- case `getsysinfo -f cpu_type` in
- c1*)
- echo c1-convex-bsd
- exit ;;
- c2*)
- if getsysinfo -f scalar_acc
- then echo c32-convex-bsd
- else echo c2-convex-bsd
- fi
- exit ;;
- c34*)
- echo c34-convex-bsd
- exit ;;
- c38*)
- echo c38-convex-bsd
- exit ;;
- c4*)
- echo c4-convex-bsd
- exit ;;
- esac
-fi
-
-cat >&2 <<EOF
-$0: unable to guess system type
-
-This script, last modified $timestamp, has failed to recognize
-the operating system you are using. It is advised that you
-download the most up to date version of the config scripts from
-
- http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
-and
- http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
-
-If the version you run ($0) is already up to date, please
-send the following data and any information you think might be
-pertinent to <config-patches@gnu.org> in order to provide the needed
-information to handle your system.
-
-config.guess timestamp = $timestamp
-
-uname -m = `(uname -m) 2>/dev/null || echo unknown`
-uname -r = `(uname -r) 2>/dev/null || echo unknown`
-uname -s = `(uname -s) 2>/dev/null || echo unknown`
-uname -v = `(uname -v) 2>/dev/null || echo unknown`
-
-/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
-/bin/uname -X = `(/bin/uname -X) 2>/dev/null`
-
-hostinfo = `(hostinfo) 2>/dev/null`
-/bin/universe = `(/bin/universe) 2>/dev/null`
-/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null`
-/bin/arch = `(/bin/arch) 2>/dev/null`
-/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null`
-/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
-
-UNAME_MACHINE = ${UNAME_MACHINE}
-UNAME_RELEASE = ${UNAME_RELEASE}
-UNAME_SYSTEM = ${UNAME_SYSTEM}
-UNAME_VERSION = ${UNAME_VERSION}
-EOF
-
-exit 1
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:
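
The config.guess removed above works by matching the `uname` machine/system/release/version tuple in one large case statement and, where that is not decisive, by compiling small C probes with $CC_FOR_BUILD. As a minimal sketch (the wrapper below is illustrative only and not part of this commit), autotools-style build scripts typically chain it with the config.sub that is removed further down:

    #!/bin/sh
    # Guess the build triplet, then canonicalize it.  Both helpers print
    # their result on stdout and exit non-zero on failure.
    build=`sh ./config.guess` || { echo "cannot guess build type" >&2; exit 1; }
    build=`sh ./config.sub "$build"` || exit 1
    echo "configuring for build system: $build"
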
diff --git a/scripts/training/compact-rule-table/config.h.in b/scripts/training/compact-rule-table/config.h.in
deleted file mode 100644
index 83d7f588c..000000000
--- a/scripts/training/compact-rule-table/config.h.in
+++ /dev/null
@@ -1,83 +0,0 @@
-/* config.h.in. Generated from configure.ac by autoheader. */
-
-/* Defined if the requested minimum BOOST version is satisfied */
-#undef HAVE_BOOST
-
-/* Define to 1 if you have <boost/algorithm/string.hpp> */
-#undef HAVE_BOOST_ALGORITHM_STRING_HPP
-
-/* Define to 1 if you have <boost/program_options.hpp> */
-#undef HAVE_BOOST_PROGRAM_OPTIONS_HPP
-
-/* Define to 1 if you have <boost/unordered_map.hpp> */
-#undef HAVE_BOOST_UNORDERED_MAP_HPP
-
-/* Define to 1 if you have the <dlfcn.h> header file. */
-#undef HAVE_DLFCN_H
-
-/* Define to 1 if you have the <inttypes.h> header file. */
-#undef HAVE_INTTYPES_H
-
-/* Define to 1 if you have the <memory.h> header file. */
-#undef HAVE_MEMORY_H
-
-/* Define to 1 if stdbool.h conforms to C99. */
-#undef HAVE_STDBOOL_H
-
-/* Define to 1 if you have the <stdint.h> header file. */
-#undef HAVE_STDINT_H
-
-/* Define to 1 if you have the <stdlib.h> header file. */
-#undef HAVE_STDLIB_H
-
-/* Define to 1 if you have the <strings.h> header file. */
-#undef HAVE_STRINGS_H
-
-/* Define to 1 if you have the <string.h> header file. */
-#undef HAVE_STRING_H
-
-/* Define to 1 if you have the <sys/stat.h> header file. */
-#undef HAVE_SYS_STAT_H
-
-/* Define to 1 if you have the <sys/types.h> header file. */
-#undef HAVE_SYS_TYPES_H
-
-/* Define to 1 if you have the <unistd.h> header file. */
-#undef HAVE_UNISTD_H
-
-/* Define to 1 if the system has the type `_Bool'. */
-#undef HAVE__BOOL
-
-/* Define to the sub-directory in which libtool stores uninstalled libraries.
- */
-#undef LT_OBJDIR
-
-/* Name of package */
-#undef PACKAGE
-
-/* Define to the address where bug reports for this package should be sent. */
-#undef PACKAGE_BUGREPORT
-
-/* Define to the full name of this package. */
-#undef PACKAGE_NAME
-
-/* Define to the full name and version of this package. */
-#undef PACKAGE_STRING
-
-/* Define to the one symbol short name of this package. */
-#undef PACKAGE_TARNAME
-
-/* Define to the home page for this package. */
-#undef PACKAGE_URL
-
-/* Define to the version of this package. */
-#undef PACKAGE_VERSION
-
-/* Define to 1 if you have the ANSI C header files. */
-#undef STDC_HEADERS
-
-/* Version number of package */
-#undef VERSION
-
-/* Define to `unsigned int' if <sys/types.h> does not define. */
-#undef size_t
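
The config.h.in removed above is the autoheader template: when configure runs, config.status rewrites each `#undef HAVE_X` placeholder as (typically) `#define HAVE_X 1` when the feature is detected, or a commented-out `/* #undef HAVE_X */` when it is not, producing config.h for the C++ sources to test with #ifdef. A minimal sketch of inspecting that generated header, assuming the commands are run from this (now deleted) directory:

    #!/bin/sh
    # Illustrative only: regenerate config.h, then list which of the Boost
    # feature macros the configure run actually defined.
    ./configure >/dev/null 2>&1
    grep 'HAVE_BOOST' config.h
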
diff --git a/scripts/training/compact-rule-table/config.sub b/scripts/training/compact-rule-table/config.sub
deleted file mode 100755
index 2a55a5075..000000000
--- a/scripts/training/compact-rule-table/config.sub
+++ /dev/null
@@ -1,1705 +0,0 @@
-#! /bin/sh
-# Configuration validation subroutine script.
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
-# Free Software Foundation, Inc.
-
-timestamp='2009-11-20'
-
-# This file is (in principle) common to ALL GNU software.
-# The presence of a machine in this file suggests that SOME GNU software
-# can handle that machine. It does not imply ALL GNU software can.
-#
-# This file is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
-# 02110-1301, USA.
-#
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-
-# Please send patches to <config-patches@gnu.org>. Submit a context
-# diff and a properly formatted GNU ChangeLog entry.
-#
-# Configuration subroutine to validate and canonicalize a configuration type.
-# Supply the specified configuration type as an argument.
-# If it is invalid, we print an error message on stderr and exit with code 1.
-# Otherwise, we print the canonical config type on stdout and succeed.
-
-# You can get the latest version of this script from:
-# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
-
-# This file is supposed to be the same for all GNU packages
-# and recognize all the CPU types, system types and aliases
-# that are meaningful with *any* GNU software.
-# Each package is responsible for reporting which valid configurations
-# it does not support. The user should be able to distinguish
-# a failure to support a valid configuration from a meaningless
-# configuration.
-
-# The goal of this file is to map all the various variations of a given
-# machine specification into a single specification in the form:
-# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
-# or in some cases, the newer four-part form:
-# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
-# It is wrong to echo any other type of specification.
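# Illustrative examples (not from the deleted script), assuming a POSIX shell:
#
#   $ sh ./config.sub i686-linux     # alias: CPU plus bare kernel name
#   i686-pc-linux-gnu                # canonical CPU-MANUFACTURER-KERNEL-OS
#   $ sh ./config.sub amd64          # bare CPU alias, no company or OS
#   x86_64-pc-none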
-
-me=`echo "$0" | sed -e 's,.*/,,'`
-
-usage="\
-Usage: $0 [OPTION] CPU-MFR-OPSYS
- $0 [OPTION] ALIAS
-
-Canonicalize a configuration name.
-
-Operation modes:
- -h, --help print this help, then exit
- -t, --time-stamp print date of last modification, then exit
- -v, --version print version number, then exit
-
-Report bugs and patches to <config-patches@gnu.org>."
-
-version="\
-GNU config.sub ($timestamp)
-
-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
-2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
-
-This is free software; see the source for copying conditions. There is NO
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
-
-help="
-Try \`$me --help' for more information."
-
-# Parse command line
-while test $# -gt 0 ; do
- case $1 in
- --time-stamp | --time* | -t )
- echo "$timestamp" ; exit ;;
- --version | -v )
- echo "$version" ; exit ;;
- --help | --h* | -h )
- echo "$usage"; exit ;;
- -- ) # Stop option processing
- shift; break ;;
- - ) # Use stdin as input.
- break ;;
- -* )
- echo "$me: invalid option $1$help"
- exit 1 ;;
-
- *local*)
- # First pass through any local machine types.
- echo $1
- exit ;;
-
- * )
- break ;;
- esac
-done
-
-case $# in
- 0) echo "$me: missing argument$help" >&2
- exit 1;;
- 1) ;;
- *) echo "$me: too many arguments$help" >&2
- exit 1;;
-esac
-
-# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
-# Here we must recognize all the valid KERNEL-OS combinations.
-maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
-case $maybe_os in
- nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \
- uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \
- kopensolaris*-gnu* | \
- storm-chaos* | os2-emx* | rtmk-nova*)
- os=-$maybe_os
- basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
- ;;
- *)
- basic_machine=`echo $1 | sed 's/-[^-]*$//'`
- if [ $basic_machine != $1 ]
- then os=`echo $1 | sed 's/.*-/-/'`
- else os=; fi
- ;;
-esac
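# For example (illustration, not from the deleted script): the sed above
# splits "x86_64-unknown-linux-gnu" into basic_machine=x86_64-unknown and
# the KERNEL-OS pair os=-linux-gnu, while "i386-msdos" has no such pair
# and takes the fallback branch, giving basic_machine=i386 and os=-msdos.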
-
-### Let's recognize common machines as not being operating systems so
-### that things like config.sub decstation-3100 work. We also
-### recognize some manufacturers as not being operating systems, so we
-### can provide default operating systems below.
-case $os in
- -sun*os*)
- # Prevent following clause from handling this invalid input.
- ;;
- -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
- -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
- -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
- -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
- -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
- -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
- -apple | -axis | -knuth | -cray | -microblaze)
- os=
- basic_machine=$1
- ;;
- -bluegene*)
- os=-cnk
- ;;
- -sim | -cisco | -oki | -wec | -winbond)
- os=
- basic_machine=$1
- ;;
- -scout)
- ;;
- -wrs)
- os=-vxworks
- basic_machine=$1
- ;;
- -chorusos*)
- os=-chorusos
- basic_machine=$1
- ;;
- -chorusrdb)
- os=-chorusrdb
- basic_machine=$1
- ;;
- -hiux*)
- os=-hiuxwe2
- ;;
- -sco6)
- os=-sco5v6
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco5)
- os=-sco3.2v5
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco4)
- os=-sco3.2v4
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco3.2.[4-9]*)
- os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco3.2v[4-9]*)
- # Don't forget version if it is 3.2v4 or newer.
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco5v6*)
- # Don't forget version if it is 3.2v4 or newer.
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -sco*)
- os=-sco3.2v2
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -udk*)
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -isc)
- os=-isc2.2
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -clix*)
- basic_machine=clipper-intergraph
- ;;
- -isc*)
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
- ;;
- -lynx*)
- os=-lynxos
- ;;
- -ptx*)
- basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
- ;;
- -windowsnt*)
- os=`echo $os | sed -e 's/windowsnt/winnt/'`
- ;;
- -psos*)
- os=-psos
- ;;
- -mint | -mint[0-9]*)
- basic_machine=m68k-atari
- os=-mint
- ;;
-esac
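# Illustrative effect of the clause above (not from the deleted script):
# for "decstation-3100" the first split yields os=-3100, but -3100* is
# listed as a manufacturer rather than an operating system, so os is
# cleared and basic_machine becomes decstation-3100; the default-OS table
# near the end of the script then completes it to mips-dec-ultrix4.2.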
-
-# Decode aliases for certain CPU-COMPANY combinations.
-case $basic_machine in
- # Recognize the basic CPU types without company name.
- # Some are omitted here because they have special meanings below.
- 1750a | 580 \
- | a29k \
- | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
- | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
- | am33_2.0 \
- | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
- | bfin \
- | c4x | clipper \
- | d10v | d30v | dlx | dsp16xx \
- | fido | fr30 | frv \
- | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
- | i370 | i860 | i960 | ia64 \
- | ip2k | iq2000 \
- | lm32 \
- | m32c | m32r | m32rle | m68000 | m68k | m88k \
- | maxq | mb | microblaze | mcore | mep | metag \
- | mips | mipsbe | mipseb | mipsel | mipsle \
- | mips16 \
- | mips64 | mips64el \
- | mips64octeon | mips64octeonel \
- | mips64orion | mips64orionel \
- | mips64r5900 | mips64r5900el \
- | mips64vr | mips64vrel \
- | mips64vr4100 | mips64vr4100el \
- | mips64vr4300 | mips64vr4300el \
- | mips64vr5000 | mips64vr5000el \
- | mips64vr5900 | mips64vr5900el \
- | mipsisa32 | mipsisa32el \
- | mipsisa32r2 | mipsisa32r2el \
- | mipsisa64 | mipsisa64el \
- | mipsisa64r2 | mipsisa64r2el \
- | mipsisa64sb1 | mipsisa64sb1el \
- | mipsisa64sr71k | mipsisa64sr71kel \
- | mipstx39 | mipstx39el \
- | mn10200 | mn10300 \
- | moxie \
- | mt \
- | msp430 \
- | nios | nios2 \
- | ns16k | ns32k \
- | or32 \
- | pdp10 | pdp11 | pj | pjl \
- | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \
- | pyramid \
- | rx \
- | score \
- | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
- | sh64 | sh64le \
- | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
- | sparcv8 | sparcv9 | sparcv9b | sparcv9v \
- | spu | strongarm \
- | tahoe | thumb | tic4x | tic80 | tron \
- | ubicom32 \
- | v850 | v850e \
- | we32k \
- | x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \
- | z8k | z80)
- basic_machine=$basic_machine-unknown
- ;;
- m6811 | m68hc11 | m6812 | m68hc12 | picochip)
- # Motorola 68HC11/12.
- basic_machine=$basic_machine-unknown
- os=-none
- ;;
- m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
- ;;
- ms1)
- basic_machine=mt-unknown
- ;;
-
- # We use `pc' rather than `unknown'
- # because (1) that's what they normally are, and
- # (2) the word "unknown" tends to confuse beginning users.
- i*86 | x86_64)
- basic_machine=$basic_machine-pc
- ;;
- # Object if more than one company name word.
- *-*-*)
- echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
- exit 1
- ;;
- # Recognize the basic CPU types with company name.
- 580-* \
- | a29k-* \
- | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
- | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
- | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
- | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
- | avr-* | avr32-* \
- | bfin-* | bs2000-* \
- | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
- | clipper-* | craynv-* | cydra-* \
- | d10v-* | d30v-* | dlx-* \
- | elxsi-* \
- | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
- | h8300-* | h8500-* \
- | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
- | i*86-* | i860-* | i960-* | ia64-* \
- | ip2k-* | iq2000-* \
- | lm32-* \
- | m32c-* | m32r-* | m32rle-* \
- | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
- | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \
- | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
- | mips16-* \
- | mips64-* | mips64el-* \
- | mips64octeon-* | mips64octeonel-* \
- | mips64orion-* | mips64orionel-* \
- | mips64r5900-* | mips64r5900el-* \
- | mips64vr-* | mips64vrel-* \
- | mips64vr4100-* | mips64vr4100el-* \
- | mips64vr4300-* | mips64vr4300el-* \
- | mips64vr5000-* | mips64vr5000el-* \
- | mips64vr5900-* | mips64vr5900el-* \
- | mipsisa32-* | mipsisa32el-* \
- | mipsisa32r2-* | mipsisa32r2el-* \
- | mipsisa64-* | mipsisa64el-* \
- | mipsisa64r2-* | mipsisa64r2el-* \
- | mipsisa64sb1-* | mipsisa64sb1el-* \
- | mipsisa64sr71k-* | mipsisa64sr71kel-* \
- | mipstx39-* | mipstx39el-* \
- | mmix-* \
- | mt-* \
- | msp430-* \
- | nios-* | nios2-* \
- | none-* | np1-* | ns16k-* | ns32k-* \
- | orion-* \
- | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
- | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \
- | pyramid-* \
- | romp-* | rs6000-* | rx-* \
- | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
- | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
- | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
- | sparclite-* \
- | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \
- | tahoe-* | thumb-* \
- | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* | tile-* \
- | tron-* \
- | ubicom32-* \
- | v850-* | v850e-* | vax-* \
- | we32k-* \
- | x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \
- | xstormy16-* | xtensa*-* \
- | ymp-* \
- | z8k-* | z80-*)
- ;;
- # Recognize the basic CPU types without company name, with glob match.
- xtensa*)
- basic_machine=$basic_machine-unknown
- ;;
- # Recognize the various machine names and aliases which stand
- # for a CPU type and a company and sometimes even an OS.
- 386bsd)
- basic_machine=i386-unknown
- os=-bsd
- ;;
- 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
- basic_machine=m68000-att
- ;;
- 3b*)
- basic_machine=we32k-att
- ;;
- a29khif)
- basic_machine=a29k-amd
- os=-udi
- ;;
- abacus)
- basic_machine=abacus-unknown
- ;;
- adobe68k)
- basic_machine=m68010-adobe
- os=-scout
- ;;
- alliant | fx80)
- basic_machine=fx80-alliant
- ;;
- altos | altos3068)
- basic_machine=m68k-altos
- ;;
- am29k)
- basic_machine=a29k-none
- os=-bsd
- ;;
- amd64)
- basic_machine=x86_64-pc
- ;;
- amd64-*)
- basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- amdahl)
- basic_machine=580-amdahl
- os=-sysv
- ;;
- amiga | amiga-*)
- basic_machine=m68k-unknown
- ;;
- amigaos | amigados)
- basic_machine=m68k-unknown
- os=-amigaos
- ;;
- amigaunix | amix)
- basic_machine=m68k-unknown
- os=-sysv4
- ;;
- apollo68)
- basic_machine=m68k-apollo
- os=-sysv
- ;;
- apollo68bsd)
- basic_machine=m68k-apollo
- os=-bsd
- ;;
- aros)
- basic_machine=i386-pc
- os=-aros
- ;;
- aux)
- basic_machine=m68k-apple
- os=-aux
- ;;
- balance)
- basic_machine=ns32k-sequent
- os=-dynix
- ;;
- blackfin)
- basic_machine=bfin-unknown
- os=-linux
- ;;
- blackfin-*)
- basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'`
- os=-linux
- ;;
- bluegene*)
- basic_machine=powerpc-ibm
- os=-cnk
- ;;
- c90)
- basic_machine=c90-cray
- os=-unicos
- ;;
- cegcc)
- basic_machine=arm-unknown
- os=-cegcc
- ;;
- convex-c1)
- basic_machine=c1-convex
- os=-bsd
- ;;
- convex-c2)
- basic_machine=c2-convex
- os=-bsd
- ;;
- convex-c32)
- basic_machine=c32-convex
- os=-bsd
- ;;
- convex-c34)
- basic_machine=c34-convex
- os=-bsd
- ;;
- convex-c38)
- basic_machine=c38-convex
- os=-bsd
- ;;
- cray | j90)
- basic_machine=j90-cray
- os=-unicos
- ;;
- craynv)
- basic_machine=craynv-cray
- os=-unicosmp
- ;;
- cr16)
- basic_machine=cr16-unknown
- os=-elf
- ;;
- crds | unos)
- basic_machine=m68k-crds
- ;;
- crisv32 | crisv32-* | etraxfs*)
- basic_machine=crisv32-axis
- ;;
- cris | cris-* | etrax*)
- basic_machine=cris-axis
- ;;
- crx)
- basic_machine=crx-unknown
- os=-elf
- ;;
- da30 | da30-*)
- basic_machine=m68k-da30
- ;;
- decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
- basic_machine=mips-dec
- ;;
- decsystem10* | dec10*)
- basic_machine=pdp10-dec
- os=-tops10
- ;;
- decsystem20* | dec20*)
- basic_machine=pdp10-dec
- os=-tops20
- ;;
- delta | 3300 | motorola-3300 | motorola-delta \
- | 3300-motorola | delta-motorola)
- basic_machine=m68k-motorola
- ;;
- delta88)
- basic_machine=m88k-motorola
- os=-sysv3
- ;;
- dicos)
- basic_machine=i686-pc
- os=-dicos
- ;;
- djgpp)
- basic_machine=i586-pc
- os=-msdosdjgpp
- ;;
- dpx20 | dpx20-*)
- basic_machine=rs6000-bull
- os=-bosx
- ;;
- dpx2* | dpx2*-bull)
- basic_machine=m68k-bull
- os=-sysv3
- ;;
- ebmon29k)
- basic_machine=a29k-amd
- os=-ebmon
- ;;
- elxsi)
- basic_machine=elxsi-elxsi
- os=-bsd
- ;;
- encore | umax | mmax)
- basic_machine=ns32k-encore
- ;;
- es1800 | OSE68k | ose68k | ose | OSE)
- basic_machine=m68k-ericsson
- os=-ose
- ;;
- fx2800)
- basic_machine=i860-alliant
- ;;
- genix)
- basic_machine=ns32k-ns
- ;;
- gmicro)
- basic_machine=tron-gmicro
- os=-sysv
- ;;
- go32)
- basic_machine=i386-pc
- os=-go32
- ;;
- h3050r* | hiux*)
- basic_machine=hppa1.1-hitachi
- os=-hiuxwe2
- ;;
- h8300hms)
- basic_machine=h8300-hitachi
- os=-hms
- ;;
- h8300xray)
- basic_machine=h8300-hitachi
- os=-xray
- ;;
- h8500hms)
- basic_machine=h8500-hitachi
- os=-hms
- ;;
- harris)
- basic_machine=m88k-harris
- os=-sysv3
- ;;
- hp300-*)
- basic_machine=m68k-hp
- ;;
- hp300bsd)
- basic_machine=m68k-hp
- os=-bsd
- ;;
- hp300hpux)
- basic_machine=m68k-hp
- os=-hpux
- ;;
- hp3k9[0-9][0-9] | hp9[0-9][0-9])
- basic_machine=hppa1.0-hp
- ;;
- hp9k2[0-9][0-9] | hp9k31[0-9])
- basic_machine=m68000-hp
- ;;
- hp9k3[2-9][0-9])
- basic_machine=m68k-hp
- ;;
- hp9k6[0-9][0-9] | hp6[0-9][0-9])
- basic_machine=hppa1.0-hp
- ;;
- hp9k7[0-79][0-9] | hp7[0-79][0-9])
- basic_machine=hppa1.1-hp
- ;;
- hp9k78[0-9] | hp78[0-9])
- # FIXME: really hppa2.0-hp
- basic_machine=hppa1.1-hp
- ;;
- hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
- # FIXME: really hppa2.0-hp
- basic_machine=hppa1.1-hp
- ;;
- hp9k8[0-9][13679] | hp8[0-9][13679])
- basic_machine=hppa1.1-hp
- ;;
- hp9k8[0-9][0-9] | hp8[0-9][0-9])
- basic_machine=hppa1.0-hp
- ;;
- hppa-next)
- os=-nextstep3
- ;;
- hppaosf)
- basic_machine=hppa1.1-hp
- os=-osf
- ;;
- hppro)
- basic_machine=hppa1.1-hp
- os=-proelf
- ;;
- i370-ibm* | ibm*)
- basic_machine=i370-ibm
- ;;
-# I'm not sure what "Sysv32" means. Should this be sysv3.2?
- i*86v32)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-sysv32
- ;;
- i*86v4*)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-sysv4
- ;;
- i*86v)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-sysv
- ;;
- i*86sol2)
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
- os=-solaris2
- ;;
- i386mach)
- basic_machine=i386-mach
- os=-mach
- ;;
- i386-vsta | vsta)
- basic_machine=i386-unknown
- os=-vsta
- ;;
- iris | iris4d)
- basic_machine=mips-sgi
- case $os in
- -irix*)
- ;;
- *)
- os=-irix4
- ;;
- esac
- ;;
- isi68 | isi)
- basic_machine=m68k-isi
- os=-sysv
- ;;
- m68knommu)
- basic_machine=m68k-unknown
- os=-linux
- ;;
- m68knommu-*)
- basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'`
- os=-linux
- ;;
- m88k-omron*)
- basic_machine=m88k-omron
- ;;
- magnum | m3230)
- basic_machine=mips-mips
- os=-sysv
- ;;
- merlin)
- basic_machine=ns32k-utek
- os=-sysv
- ;;
- microblaze)
- basic_machine=microblaze-xilinx
- ;;
- mingw32)
- basic_machine=i386-pc
- os=-mingw32
- ;;
- mingw32ce)
- basic_machine=arm-unknown
- os=-mingw32ce
- ;;
- miniframe)
- basic_machine=m68000-convergent
- ;;
- *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
- basic_machine=m68k-atari
- os=-mint
- ;;
- mips3*-*)
- basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
- ;;
- mips3*)
- basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
- ;;
- monitor)
- basic_machine=m68k-rom68k
- os=-coff
- ;;
- morphos)
- basic_machine=powerpc-unknown
- os=-morphos
- ;;
- msdos)
- basic_machine=i386-pc
- os=-msdos
- ;;
- ms1-*)
- basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
- ;;
- mvs)
- basic_machine=i370-ibm
- os=-mvs
- ;;
- ncr3000)
- basic_machine=i486-ncr
- os=-sysv4
- ;;
- netbsd386)
- basic_machine=i386-unknown
- os=-netbsd
- ;;
- netwinder)
- basic_machine=armv4l-rebel
- os=-linux
- ;;
- news | news700 | news800 | news900)
- basic_machine=m68k-sony
- os=-newsos
- ;;
- news1000)
- basic_machine=m68030-sony
- os=-newsos
- ;;
- news-3600 | risc-news)
- basic_machine=mips-sony
- os=-newsos
- ;;
- necv70)
- basic_machine=v70-nec
- os=-sysv
- ;;
- next | m*-next )
- basic_machine=m68k-next
- case $os in
- -nextstep* )
- ;;
- -ns2*)
- os=-nextstep2
- ;;
- *)
- os=-nextstep3
- ;;
- esac
- ;;
- nh3000)
- basic_machine=m68k-harris
- os=-cxux
- ;;
- nh[45]000)
- basic_machine=m88k-harris
- os=-cxux
- ;;
- nindy960)
- basic_machine=i960-intel
- os=-nindy
- ;;
- mon960)
- basic_machine=i960-intel
- os=-mon960
- ;;
- nonstopux)
- basic_machine=mips-compaq
- os=-nonstopux
- ;;
- np1)
- basic_machine=np1-gould
- ;;
- nsr-tandem)
- basic_machine=nsr-tandem
- ;;
- op50n-* | op60c-*)
- basic_machine=hppa1.1-oki
- os=-proelf
- ;;
- openrisc | openrisc-*)
- basic_machine=or32-unknown
- ;;
- os400)
- basic_machine=powerpc-ibm
- os=-os400
- ;;
- OSE68000 | ose68000)
- basic_machine=m68000-ericsson
- os=-ose
- ;;
- os68k)
- basic_machine=m68k-none
- os=-os68k
- ;;
- pa-hitachi)
- basic_machine=hppa1.1-hitachi
- os=-hiuxwe2
- ;;
- paragon)
- basic_machine=i860-intel
- os=-osf
- ;;
- parisc)
- basic_machine=hppa-unknown
- os=-linux
- ;;
- parisc-*)
- basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'`
- os=-linux
- ;;
- pbd)
- basic_machine=sparc-tti
- ;;
- pbb)
- basic_machine=m68k-tti
- ;;
- pc532 | pc532-*)
- basic_machine=ns32k-pc532
- ;;
- pc98)
- basic_machine=i386-pc
- ;;
- pc98-*)
- basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentium | p5 | k5 | k6 | nexgen | viac3)
- basic_machine=i586-pc
- ;;
- pentiumpro | p6 | 6x86 | athlon | athlon_*)
- basic_machine=i686-pc
- ;;
- pentiumii | pentium2 | pentiumiii | pentium3)
- basic_machine=i686-pc
- ;;
- pentium4)
- basic_machine=i786-pc
- ;;
- pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
- basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentiumpro-* | p6-* | 6x86-* | athlon-*)
- basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
- basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pentium4-*)
- basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- pn)
- basic_machine=pn-gould
- ;;
- power) basic_machine=power-ibm
- ;;
- ppc) basic_machine=powerpc-unknown
- ;;
- ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ppcle | powerpclittle | ppc-le | powerpc-little)
- basic_machine=powerpcle-unknown
- ;;
- ppcle-* | powerpclittle-*)
- basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ppc64) basic_machine=powerpc64-unknown
- ;;
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ppc64le | powerpc64little | ppc64-le | powerpc64-little)
- basic_machine=powerpc64le-unknown
- ;;
- ppc64le-* | powerpc64little-*)
- basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
- ;;
- ps2)
- basic_machine=i386-ibm
- ;;
- pw32)
- basic_machine=i586-unknown
- os=-pw32
- ;;
- rdos)
- basic_machine=i386-pc
- os=-rdos
- ;;
- rom68k)
- basic_machine=m68k-rom68k
- os=-coff
- ;;
- rm[46]00)
- basic_machine=mips-siemens
- ;;
- rtpc | rtpc-*)
- basic_machine=romp-ibm
- ;;
- s390 | s390-*)
- basic_machine=s390-ibm
- ;;
- s390x | s390x-*)
- basic_machine=s390x-ibm
- ;;
- sa29200)
- basic_machine=a29k-amd
- os=-udi
- ;;
- sb1)
- basic_machine=mipsisa64sb1-unknown
- ;;
- sb1el)
- basic_machine=mipsisa64sb1el-unknown
- ;;
- sde)
- basic_machine=mipsisa32-sde
- os=-elf
- ;;
- sei)
- basic_machine=mips-sei
- os=-seiux
- ;;
- sequent)
- basic_machine=i386-sequent
- ;;
- sh)
- basic_machine=sh-hitachi
- os=-hms
- ;;
- sh5el)
- basic_machine=sh5le-unknown
- ;;
- sh64)
- basic_machine=sh64-unknown
- ;;
- sparclite-wrs | simso-wrs)
- basic_machine=sparclite-wrs
- os=-vxworks
- ;;
- sps7)
- basic_machine=m68k-bull
- os=-sysv2
- ;;
- spur)
- basic_machine=spur-unknown
- ;;
- st2000)
- basic_machine=m68k-tandem
- ;;
- stratus)
- basic_machine=i860-stratus
- os=-sysv4
- ;;
- sun2)
- basic_machine=m68000-sun
- ;;
- sun2os3)
- basic_machine=m68000-sun
- os=-sunos3
- ;;
- sun2os4)
- basic_machine=m68000-sun
- os=-sunos4
- ;;
- sun3os3)
- basic_machine=m68k-sun
- os=-sunos3
- ;;
- sun3os4)
- basic_machine=m68k-sun
- os=-sunos4
- ;;
- sun4os3)
- basic_machine=sparc-sun
- os=-sunos3
- ;;
- sun4os4)
- basic_machine=sparc-sun
- os=-sunos4
- ;;
- sun4sol2)
- basic_machine=sparc-sun
- os=-solaris2
- ;;
- sun3 | sun3-*)
- basic_machine=m68k-sun
- ;;
- sun4)
- basic_machine=sparc-sun
- ;;
- sun386 | sun386i | roadrunner)
- basic_machine=i386-sun
- ;;
- sv1)
- basic_machine=sv1-cray
- os=-unicos
- ;;
- symmetry)
- basic_machine=i386-sequent
- os=-dynix
- ;;
- t3e)
- basic_machine=alphaev5-cray
- os=-unicos
- ;;
- t90)
- basic_machine=t90-cray
- os=-unicos
- ;;
- tic54x | c54x*)
- basic_machine=tic54x-unknown
- os=-coff
- ;;
- tic55x | c55x*)
- basic_machine=tic55x-unknown
- os=-coff
- ;;
- tic6x | c6x*)
- basic_machine=tic6x-unknown
- os=-coff
- ;;
- tile*)
- basic_machine=tile-unknown
- os=-linux-gnu
- ;;
- tx39)
- basic_machine=mipstx39-unknown
- ;;
- tx39el)
- basic_machine=mipstx39el-unknown
- ;;
- toad1)
- basic_machine=pdp10-xkl
- os=-tops20
- ;;
- tower | tower-32)
- basic_machine=m68k-ncr
- ;;
- tpf)
- basic_machine=s390x-ibm
- os=-tpf
- ;;
- udi29k)
- basic_machine=a29k-amd
- os=-udi
- ;;
- ultra3)
- basic_machine=a29k-nyu
- os=-sym1
- ;;
- v810 | necv810)
- basic_machine=v810-nec
- os=-none
- ;;
- vaxv)
- basic_machine=vax-dec
- os=-sysv
- ;;
- vms)
- basic_machine=vax-dec
- os=-vms
- ;;
- vpp*|vx|vx-*)
- basic_machine=f301-fujitsu
- ;;
- vxworks960)
- basic_machine=i960-wrs
- os=-vxworks
- ;;
- vxworks68)
- basic_machine=m68k-wrs
- os=-vxworks
- ;;
- vxworks29k)
- basic_machine=a29k-wrs
- os=-vxworks
- ;;
- w65*)
- basic_machine=w65-wdc
- os=-none
- ;;
- w89k-*)
- basic_machine=hppa1.1-winbond
- os=-proelf
- ;;
- xbox)
- basic_machine=i686-pc
- os=-mingw32
- ;;
- xps | xps100)
- basic_machine=xps100-honeywell
- ;;
- ymp)
- basic_machine=ymp-cray
- os=-unicos
- ;;
- z8k-*-coff)
- basic_machine=z8k-unknown
- os=-sim
- ;;
- z80-*-coff)
- basic_machine=z80-unknown
- os=-sim
- ;;
- none)
- basic_machine=none-none
- os=-none
- ;;
-
-# Here we handle the default manufacturer of certain CPU types. It is in
-# some cases the only manufacturer, in others, it is the most popular.
- w89k)
- basic_machine=hppa1.1-winbond
- ;;
- op50n)
- basic_machine=hppa1.1-oki
- ;;
- op60c)
- basic_machine=hppa1.1-oki
- ;;
- romp)
- basic_machine=romp-ibm
- ;;
- mmix)
- basic_machine=mmix-knuth
- ;;
- rs6000)
- basic_machine=rs6000-ibm
- ;;
- vax)
- basic_machine=vax-dec
- ;;
- pdp10)
- # there are many clones, so DEC is not a safe bet
- basic_machine=pdp10-unknown
- ;;
- pdp11)
- basic_machine=pdp11-dec
- ;;
- we32k)
- basic_machine=we32k-att
- ;;
- sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
- basic_machine=sh-unknown
- ;;
- sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
- basic_machine=sparc-sun
- ;;
- cydra)
- basic_machine=cydra-cydrome
- ;;
- orion)
- basic_machine=orion-highlevel
- ;;
- orion105)
- basic_machine=clipper-highlevel
- ;;
- mac | mpw | mac-mpw)
- basic_machine=m68k-apple
- ;;
- pmac | pmac-mpw)
- basic_machine=powerpc-apple
- ;;
- *-unknown)
- # Make sure to match an already-canonicalized machine name.
- ;;
- *)
- echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
- exit 1
- ;;
-esac
-
-# Here we canonicalize certain aliases for manufacturers.
-case $basic_machine in
- *-digital*)
- basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
- ;;
- *-commodore*)
- basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
- ;;
- *)
- ;;
-esac
-
-# Decode manufacturer-specific aliases for certain operating systems.
-
-if [ x"$os" != x"" ]
-then
-case $os in
- # First match some system type aliases
- # that might get confused with valid system types.
- # -solaris* is a basic system type, with this one exception.
- -auroraux)
- os=-auroraux
- ;;
- -solaris1 | -solaris1.*)
- os=`echo $os | sed -e 's|solaris1|sunos4|'`
- ;;
- -solaris)
- os=-solaris2
- ;;
- -svr4*)
- os=-sysv4
- ;;
- -unixware*)
- os=-sysv4.2uw
- ;;
- -gnu/linux*)
- os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
- ;;
- # First accept the basic system types.
- # The portable systems comes first.
- # Each alternative MUST END IN A *, to match a version number.
- # -sysv* is not here because it comes later, after sysvr4.
- -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
- | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
- | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
- | -sym* | -kopensolaris* \
- | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
- | -aos* | -aros* \
- | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
- | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
- | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
- | -openbsd* | -solidbsd* \
- | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
- | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
- | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
- | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
- | -chorusos* | -chorusrdb* | -cegcc* \
- | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
- | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \
- | -uxpv* | -beos* | -mpeix* | -udk* \
- | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
- | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
- | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
- | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
- | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
- | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
- | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*)
- # Remember, each alternative MUST END IN *, to match a version number.
- ;;
- -qnx*)
- case $basic_machine in
- x86-* | i*86-*)
- ;;
- *)
- os=-nto$os
- ;;
- esac
- ;;
- -nto-qnx*)
- ;;
- -nto*)
- os=`echo $os | sed -e 's|nto|nto-qnx|'`
- ;;
- -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
- | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
- | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
- ;;
- -mac*)
- os=`echo $os | sed -e 's|mac|macos|'`
- ;;
- -linux-dietlibc)
- os=-linux-dietlibc
- ;;
- -linux*)
- os=`echo $os | sed -e 's|linux|linux-gnu|'`
- ;;
- -sunos5*)
- os=`echo $os | sed -e 's|sunos5|solaris2|'`
- ;;
- -sunos6*)
- os=`echo $os | sed -e 's|sunos6|solaris3|'`
- ;;
- -opened*)
- os=-openedition
- ;;
- -os400*)
- os=-os400
- ;;
- -wince*)
- os=-wince
- ;;
- -osfrose*)
- os=-osfrose
- ;;
- -osf*)
- os=-osf
- ;;
- -utek*)
- os=-bsd
- ;;
- -dynix*)
- os=-bsd
- ;;
- -acis*)
- os=-aos
- ;;
- -atheos*)
- os=-atheos
- ;;
- -syllable*)
- os=-syllable
- ;;
- -386bsd)
- os=-bsd
- ;;
- -ctix* | -uts*)
- os=-sysv
- ;;
- -nova*)
- os=-rtmk-nova
- ;;
- -ns2 )
- os=-nextstep2
- ;;
- -nsk*)
- os=-nsk
- ;;
- # Preserve the version number of sinix5.
- -sinix5.*)
- os=`echo $os | sed -e 's|sinix|sysv|'`
- ;;
- -sinix*)
- os=-sysv4
- ;;
- -tpf*)
- os=-tpf
- ;;
- -triton*)
- os=-sysv3
- ;;
- -oss*)
- os=-sysv3
- ;;
- -svr4)
- os=-sysv4
- ;;
- -svr3)
- os=-sysv3
- ;;
- -sysvr4)
- os=-sysv4
- ;;
- # This must come after -sysvr4.
- -sysv*)
- ;;
- -ose*)
- os=-ose
- ;;
- -es1800*)
- os=-ose
- ;;
- -xenix)
- os=-xenix
- ;;
- -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
- os=-mint
- ;;
- -aros*)
- os=-aros
- ;;
- -kaos*)
- os=-kaos
- ;;
- -zvmoe)
- os=-zvmoe
- ;;
- -dicos*)
- os=-dicos
- ;;
- -none)
- ;;
- *)
- # Get rid of the `-' at the beginning of $os.
- os=`echo $os | sed 's/[^-]*-//'`
- echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
- exit 1
- ;;
-esac
-else
-
-# Here we handle the default operating systems that come with various machines.
-# The value should be what the vendor currently ships out the door with their
-# machine or put another way, the most popular os provided with the machine.
-
-# Note that if you're going to try to match "-MANUFACTURER" here (say,
-# "-sun"), then you have to tell the case statement up towards the top
-# that MANUFACTURER isn't an operating system. Otherwise, code above
-# will signal an error saying that MANUFACTURER isn't an operating
-# system, and we'll never get to this point.
-
-case $basic_machine in
- score-*)
- os=-elf
- ;;
- spu-*)
- os=-elf
- ;;
- *-acorn)
- os=-riscix1.2
- ;;
- arm*-rebel)
- os=-linux
- ;;
- arm*-semi)
- os=-aout
- ;;
- c4x-* | tic4x-*)
- os=-coff
- ;;
- # This must come before the *-dec entry.
- pdp10-*)
- os=-tops20
- ;;
- pdp11-*)
- os=-none
- ;;
- *-dec | vax-*)
- os=-ultrix4.2
- ;;
- m68*-apollo)
- os=-domain
- ;;
- i386-sun)
- os=-sunos4.0.2
- ;;
- m68000-sun)
- os=-sunos3
- # This also exists in the configure program, but was not the
- # default.
- # os=-sunos4
- ;;
- m68*-cisco)
- os=-aout
- ;;
- mep-*)
- os=-elf
- ;;
- mips*-cisco)
- os=-elf
- ;;
- mips*-*)
- os=-elf
- ;;
- or32-*)
- os=-coff
- ;;
- *-tti) # must be before sparc entry or we get the wrong os.
- os=-sysv3
- ;;
- sparc-* | *-sun)
- os=-sunos4.1.1
- ;;
- *-be)
- os=-beos
- ;;
- *-haiku)
- os=-haiku
- ;;
- *-ibm)
- os=-aix
- ;;
- *-knuth)
- os=-mmixware
- ;;
- *-wec)
- os=-proelf
- ;;
- *-winbond)
- os=-proelf
- ;;
- *-oki)
- os=-proelf
- ;;
- *-hp)
- os=-hpux
- ;;
- *-hitachi)
- os=-hiux
- ;;
- i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
- os=-sysv
- ;;
- *-cbm)
- os=-amigaos
- ;;
- *-dg)
- os=-dgux
- ;;
- *-dolphin)
- os=-sysv3
- ;;
- m68k-ccur)
- os=-rtu
- ;;
- m88k-omron*)
- os=-luna
- ;;
- *-next )
- os=-nextstep
- ;;
- *-sequent)
- os=-ptx
- ;;
- *-crds)
- os=-unos
- ;;
- *-ns)
- os=-genix
- ;;
- i370-*)
- os=-mvs
- ;;
- *-next)
- os=-nextstep3
- ;;
- *-gould)
- os=-sysv
- ;;
- *-highlevel)
- os=-bsd
- ;;
- *-encore)
- os=-bsd
- ;;
- *-sgi)
- os=-irix
- ;;
- *-siemens)
- os=-sysv4
- ;;
- *-masscomp)
- os=-rtu
- ;;
- f30[01]-fujitsu | f700-fujitsu)
- os=-uxpv
- ;;
- *-rom68k)
- os=-coff
- ;;
- *-*bug)
- os=-coff
- ;;
- *-apple)
- os=-macos
- ;;
- *-atari*)
- os=-mint
- ;;
- *)
- os=-none
- ;;
-esac
-fi
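For illustration, when the OS field is omitted the table above supplies the vendor's customary system (assuming the machine part is accepted unchanged by the earlier sections of the script); the invocations below are hypothetical:

    ./config.sub sparc-sun              # -> sparc-sun-sunos4.1.1
    ./config.sub pdp10-dec              # -> pdp10-dec-tops20
    ./config.sub sparc-sun-solaris2.9   # explicit OS given, so the fallback is skipped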
-
-# Here we handle the case where we know the os, and the CPU type, but not the
-# manufacturer. We pick the logical manufacturer.
-vendor=unknown
-case $basic_machine in
- *-unknown)
- case $os in
- -riscix*)
- vendor=acorn
- ;;
- -sunos*)
- vendor=sun
- ;;
- -cnk*|-aix*)
- vendor=ibm
- ;;
- -beos*)
- vendor=be
- ;;
- -hpux*)
- vendor=hp
- ;;
- -mpeix*)
- vendor=hp
- ;;
- -hiux*)
- vendor=hitachi
- ;;
- -unos*)
- vendor=crds
- ;;
- -dgux*)
- vendor=dg
- ;;
- -luna*)
- vendor=omron
- ;;
- -genix*)
- vendor=ns
- ;;
- -mvs* | -opened*)
- vendor=ibm
- ;;
- -os400*)
- vendor=ibm
- ;;
- -ptx*)
- vendor=sequent
- ;;
- -tpf*)
- vendor=ibm
- ;;
- -vxsim* | -vxworks* | -windiss*)
- vendor=wrs
- ;;
- -aux*)
- vendor=apple
- ;;
- -hms*)
- vendor=hitachi
- ;;
- -mpw* | -macos*)
- vendor=apple
- ;;
- -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
- vendor=atari
- ;;
- -vos*)
- vendor=stratus
- ;;
- esac
- basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
- ;;
-esac
-
-echo $basic_machine$os
-exit
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:
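In the same spirit, the vendor table a few lines above swaps a conventional manufacturer in for "unknown" once the OS pins it down; two hypothetical cases:

    # basic_machine=mips-unknown  os=-riscix1.2  ->  mips-acorn-riscix1.2
    # basic_machine=m68k-unknown  os=-hpux9      ->  m68k-hp-hpux9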
diff --git a/scripts/training/compact-rule-table/configure b/scripts/training/compact-rule-table/configure
deleted file mode 100755
index 1cc77e54f..000000000
--- a/scripts/training/compact-rule-table/configure
+++ /dev/null
@@ -1,17728 +0,0 @@
-#! /bin/sh
-# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for moses-compact-rule-table 1.0.
-#
-# Report bugs to <moses-support@mit.edu>.
-#
-#
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
-# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
-#
-#
-# This configure script is free software; the Free Software Foundation
-# gives unlimited permission to copy, distribute and modify it.
-## -------------------- ##
-## M4sh Initialization. ##
-## -------------------- ##
-
-# Be more Bourne compatible
-DUALCASE=1; export DUALCASE # for MKS sh
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
- emulate sh
- NULLCMD=:
- # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
- # is contrary to our usage. Disable this feature.
- alias -g '${1+"$@"}'='"$@"'
- setopt NO_GLOB_SUBST
-else
- case `(set -o) 2>/dev/null` in #(
- *posix*) :
- set -o posix ;; #(
- *) :
- ;;
-esac
-fi
-
-
-as_nl='
-'
-export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
- && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
- as_echo='print -r --'
- as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
- as_echo='printf %s\n'
- as_echo_n='printf %s'
-else
- if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
- as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
- as_echo_n='/usr/ucb/echo -n'
- else
- as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
- as_echo_n_body='eval
- arg=$1;
- case $arg in #(
- *"$as_nl"*)
- expr "X$arg" : "X\\(.*\\)$as_nl";
- arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
- esac;
- expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
- '
- export as_echo_n_body
- as_echo_n='sh -c $as_echo_n_body as_echo'
- fi
- export as_echo_body
- as_echo='sh -c $as_echo_body as_echo'
-fi
-
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
- PATH_SEPARATOR=:
- (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
- (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
- PATH_SEPARATOR=';'
- }
-fi
-
-
-# IFS
-# We need space, tab and new line, in precisely that order. Quoting is
-# there to prevent editors from complaining about space-tab.
-# (If _AS_PATH_WALK were called with IFS unset, it would disable word
-# splitting by setting IFS to empty value.)
-IFS=" "" $as_nl"
-
-# Find who we are. Look in the path if we contain no directory separator.
-as_myself=
-case $0 in #((
- *[\\/]* ) as_myself=$0 ;;
- *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
- done
-IFS=$as_save_IFS
-
- ;;
-esac
-# We did not find ourselves; most probably we were run as `sh COMMAND',
-# in which case we are not to be found in the path.
-if test "x$as_myself" = x; then
- as_myself=$0
-fi
-if test ! -f "$as_myself"; then
- $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
- exit 1
-fi
-
-# Unset variables that we do not need and which cause bugs (e.g. in
-# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
-# suppresses any "Segmentation fault" message there. '((' could
-# trigger a bug in pdksh 5.2.14.
-for as_var in BASH_ENV ENV MAIL MAILPATH
-do eval test x\${$as_var+set} = xset \
- && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# NLS nuisances.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# CDPATH.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-if test "x$CONFIG_SHELL" = x; then
- as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
- emulate sh
- NULLCMD=:
- # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
- # is contrary to our usage. Disable this feature.
- alias -g '\${1+\"\$@\"}'='\"\$@\"'
- setopt NO_GLOB_SUBST
-else
- case \`(set -o) 2>/dev/null\` in #(
- *posix*) :
- set -o posix ;; #(
- *) :
- ;;
-esac
-fi
-"
- as_required="as_fn_return () { (exit \$1); }
-as_fn_success () { as_fn_return 0; }
-as_fn_failure () { as_fn_return 1; }
-as_fn_ret_success () { return 0; }
-as_fn_ret_failure () { return 1; }
-
-exitcode=0
-as_fn_success || { exitcode=1; echo as_fn_success failed.; }
-as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
-as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
-as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
-if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
-
-else
- exitcode=1; echo positional parameters were not saved.
-fi
-test x\$exitcode = x0 || exit 1"
- as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
- as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
- eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
- test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
-test \$(( 1 + 1 )) = 2 || exit 1"
- if (eval "$as_required") 2>/dev/null; then :
- as_have_required=yes
-else
- as_have_required=no
-fi
- if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
-
-else
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-as_found=false
-for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- as_found=:
- case $as_dir in #(
- /*)
- for as_base in sh bash ksh sh5; do
- # Try only shells that exist, to save several forks.
- as_shell=$as_dir/$as_base
- if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
- { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
- CONFIG_SHELL=$as_shell as_have_required=yes
- if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
- break 2
-fi
-fi
- done;;
- esac
- as_found=false
-done
-$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
- { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
- CONFIG_SHELL=$SHELL as_have_required=yes
-fi; }
-IFS=$as_save_IFS
-
-
- if test "x$CONFIG_SHELL" != x; then :
- # We cannot yet assume a decent shell, so we have to provide a
- # neutralization value for shells without unset; and this also
- # works around shells that cannot unset nonexistent variables.
- # Preserve -v and -x to the replacement shell.
- BASH_ENV=/dev/null
- ENV=/dev/null
- (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
- export CONFIG_SHELL
- case $- in # ((((
- *v*x* | *x*v* ) as_opts=-vx ;;
- *v* ) as_opts=-v ;;
- *x* ) as_opts=-x ;;
- * ) as_opts= ;;
- esac
- exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
-fi
-
- if test x$as_have_required = xno; then :
- $as_echo "$0: This script requires a shell more modern than all"
- $as_echo "$0: the shells that I found on your system."
- if test x${ZSH_VERSION+set} = xset ; then
- $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
- $as_echo "$0: be upgraded to zsh 4.3.4 or later."
- else
- $as_echo "$0: Please tell bug-autoconf@gnu.org and
-$0: moses-support@mit.edu about your system, including any
-$0: error possibly output before this message. Then install
-$0: a modern shell, or manually run the script under such a
-$0: shell if you do have one."
- fi
- exit 1
-fi
-fi
-fi
-SHELL=${CONFIG_SHELL-/bin/sh}
-export SHELL
-# Unset more variables known to interfere with behavior of common tools.
-CLICOLOR_FORCE= GREP_OPTIONS=
-unset CLICOLOR_FORCE GREP_OPTIONS
-
-## --------------------- ##
-## M4sh Shell Functions. ##
-## --------------------- ##
-# as_fn_unset VAR
-# ---------------
-# Portably unset VAR.
-as_fn_unset ()
-{
- { eval $1=; unset $1;}
-}
-as_unset=as_fn_unset
-
-# as_fn_set_status STATUS
-# -----------------------
-# Set $? to STATUS, without forking.
-as_fn_set_status ()
-{
- return $1
-} # as_fn_set_status
-
-# as_fn_exit STATUS
-# -----------------
-# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
-as_fn_exit ()
-{
- set +e
- as_fn_set_status $1
- exit $1
-} # as_fn_exit
-
-# as_fn_mkdir_p
-# -------------
-# Create "$as_dir" as a directory, including parents if necessary.
-as_fn_mkdir_p ()
-{
-
- case $as_dir in #(
- -*) as_dir=./$as_dir;;
- esac
- test -d "$as_dir" || eval $as_mkdir_p || {
- as_dirs=
- while :; do
- case $as_dir in #(
- *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
- *) as_qdir=$as_dir;;
- esac
- as_dirs="'$as_qdir' $as_dirs"
- as_dir=`$as_dirname -- "$as_dir" ||
-$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$as_dir" : 'X\(//\)[^/]' \| \
- X"$as_dir" : 'X\(//\)$' \| \
- X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_dir" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
- test -d "$as_dir" && break
- done
- test -z "$as_dirs" || eval "mkdir $as_dirs"
- } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
-
-
-} # as_fn_mkdir_p
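Note that as_fn_mkdir_p takes its target through the global $as_dir rather than as an argument; a minimal usage sketch with a made-up path:

    as_dir=sub/dir/to/create   # any missing parent directories are created as well
    as_fn_mkdir_p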
-# as_fn_append VAR VALUE
-# ----------------------
-# Append the text in VALUE to the end of the definition contained in VAR. Take
-# advantage of any shell optimizations that allow amortized linear growth over
-# repeated appends, instead of the typical quadratic growth present in naive
-# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
- eval 'as_fn_append ()
- {
- eval $1+=\$2
- }'
-else
- as_fn_append ()
- {
- eval $1=\$$1\$2
- }
-fi # as_fn_append
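Both definitions are called the same way; a small sketch with illustrative variable names and strings:

    notes=
    as_fn_append notes "first fragment, "
    as_fn_append notes "second fragment"
    $as_echo "$notes"    # prints: first fragment, second fragment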
-
-# as_fn_arith ARG...
-# ------------------
-# Perform arithmetic evaluation on the ARGs, and store the result in the
-# global $as_val. Take advantage of shells that can avoid forks. The arguments
-# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
- eval 'as_fn_arith ()
- {
- as_val=$(( $* ))
- }'
-else
- as_fn_arith ()
- {
- as_val=`expr "$@" || test $? -eq 1`
- }
-fi # as_fn_arith
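Whichever branch is chosen, the result ends up in $as_val; for example:

    as_fn_arith 6 '*' 7    # quote '*' so the calling shell does not glob-expand it
    $as_echo "$as_val"     # prints 42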
-
-
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
-# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
-# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
-as_fn_error ()
-{
- as_status=$1; test $as_status -eq 0 && as_status=1
- if test "$4"; then
- as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
- fi
- $as_echo "$as_me: error: $2" >&2
- as_fn_exit $as_status
-} # as_fn_error
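A typical call supplies a status (often $?, which the function bumps to 1 if it happens to be 0), a message, and optionally the current line plus the descriptor of the log file (fd 5 in configure scripts); the message below is illustrative:

    test -d "$srcdir" ||
      as_fn_error $? "cannot find source directory: $srcdir" "$LINENO" 5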
-
-if expr a : '\(a\)' >/dev/null 2>&1 &&
- test "X`expr 00001 : '.*\(...\)'`" = X001; then
- as_expr=expr
-else
- as_expr=false
-fi
-
-if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
- as_basename=basename
-else
- as_basename=false
-fi
-
-if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
- as_dirname=dirname
-else
- as_dirname=false
-fi
-
-as_me=`$as_basename -- "$0" ||
-$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
- X"$0" : 'X\(//\)$' \| \
- X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X/"$0" |
- sed '/^.*\/\([^/][^/]*\)\/*$/{
- s//\1/
- q
- }
- /^X\/\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\/\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-
- as_lineno_1=$LINENO as_lineno_1a=$LINENO
- as_lineno_2=$LINENO as_lineno_2a=$LINENO
- eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
- test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
- # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-)
- sed -n '
- p
- /[$]LINENO/=
- ' <$as_myself |
- sed '
- s/[$]LINENO.*/&-/
- t lineno
- b
- :lineno
- N
- :loop
- s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
- t loop
- s/-\n.*//
- ' >$as_me.lineno &&
- chmod +x "$as_me.lineno" ||
- { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
-
- # Don't try to exec as it changes $[0], causing all sorts of problems
- # (the dirname of $[0] is not the place where we might find the
- # original and so on. Autoconf is especially sensitive to this).
- . "./$as_me.lineno"
- # Exit status is that of the last command.
- exit
-}
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
- case `echo 'xy\c'` in
- *c*) ECHO_T=' ';; # ECHO_T is single tab character.
- xy) ECHO_C='\c';;
- *) echo `echo ksh88 bug on AIX 6.1` > /dev/null
- ECHO_T=' ';;
- esac;;
-*)
- ECHO_N='-n';;
-esac
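These three variables let a script print a progress message and its result on one line no matter which echo flavour is in play; the long-standing configure idiom is roughly the following (the message text is illustrative):

    echo $ECHO_N "checking for something... $ECHO_C"
    echo "${ECHO_T}yes"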
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
- rm -f conf$$.dir/conf$$.file
-else
- rm -f conf$$.dir
- mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
- if ln -s conf$$.file conf$$ 2>/dev/null; then
- as_ln_s='ln -s'
- # ... but there are two gotchas:
- # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
- # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
- # In both cases, we have to default to `cp -p'.
- ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
- as_ln_s='cp -p'
- elif ln conf$$.file conf$$ 2>/dev/null; then
- as_ln_s=ln
- else
- as_ln_s='cp -p'
- fi
-else
- as_ln_s='cp -p'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
-
-if mkdir -p . 2>/dev/null; then
- as_mkdir_p='mkdir -p "$as_dir"'
-else
- test -d ./-p && rmdir ./-p
- as_mkdir_p=false
-fi
-
-if test -x / >/dev/null 2>&1; then
- as_test_x='test -x'
-else
- if ls -dL / >/dev/null 2>&1; then
- as_ls_L_option=L
- else
- as_ls_L_option=
- fi
- as_test_x='
- eval sh -c '\''
- if test -d "$1"; then
- test -d "$1/.";
- else
- case $1 in #(
- -*)set "./$1";;
- esac;
- case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
- ???[sx]*):;;*)false;;esac;fi
- '\'' sh
- '
-fi
-as_executable_p=$as_test_x
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-
-
-# Check that we are running under the correct shell.
-SHELL=${CONFIG_SHELL-/bin/sh}
-
-case X$lt_ECHO in
-X*--fallback-echo)
- # Remove one level of quotation (which was required for Make).
- ECHO=`echo "$lt_ECHO" | sed 's,\\\\\$\\$0,'$0','`
- ;;
-esac
-
-ECHO=${lt_ECHO-echo}
-if test "X$1" = X--no-reexec; then
- # Discard the --no-reexec flag, and continue.
- shift
-elif test "X$1" = X--fallback-echo; then
- # Avoid inline document here, it may be left over
- :
-elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' ; then
- # Yippee, $ECHO works!
- :
-else
- # Restart under the correct shell.
- exec $SHELL "$0" --no-reexec ${1+"$@"}
-fi
-
-if test "X$1" = X--fallback-echo; then
- # used as fallback echo
- shift
- cat <<_LT_EOF
-$*
-_LT_EOF
- exit 0
-fi
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-if test -z "$lt_ECHO"; then
- if test "X${echo_test_string+set}" != Xset; then
- # find a string as large as possible, as long as the shell can cope with it
- for cmd in 'sed 50q "$0"' 'sed 20q "$0"' 'sed 10q "$0"' 'sed 2q "$0"' 'echo test'; do
- # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ...
- if { echo_test_string=`eval $cmd`; } 2>/dev/null &&
- { test "X$echo_test_string" = "X$echo_test_string"; } 2>/dev/null
- then
- break
- fi
- done
- fi
-
- if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- :
- else
- # The Solaris, AIX, and Digital Unix default echo programs unquote
- # backslashes. This makes it impossible to quote backslashes using
- # echo "$something" | sed 's/\\/\\\\/g'
- #
- # So, first we look for a working echo in the user's PATH.
-
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for dir in $PATH /usr/ucb; do
- IFS="$lt_save_ifs"
- if (test -f $dir/echo || test -f $dir/echo$ac_exeext) &&
- test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- ECHO="$dir/echo"
- break
- fi
- done
- IFS="$lt_save_ifs"
-
- if test "X$ECHO" = Xecho; then
- # We didn't find a better echo, so look for alternatives.
- if test "X`{ print -r '\t'; } 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`{ print -r "$echo_test_string"; } 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- # This shell has a builtin print -r that does the trick.
- ECHO='print -r'
- elif { test -f /bin/ksh || test -f /bin/ksh$ac_exeext; } &&
- test "X$CONFIG_SHELL" != X/bin/ksh; then
- # If we have ksh, try running configure again with it.
- ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh}
- export ORIGINAL_CONFIG_SHELL
- CONFIG_SHELL=/bin/ksh
- export CONFIG_SHELL
- exec $CONFIG_SHELL "$0" --no-reexec ${1+"$@"}
- else
- # Try using printf.
- ECHO='printf %s\n'
- if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- # Cool, printf works
- :
- elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` &&
- test "X$echo_testing_string" = 'X\t' &&
- echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL
- export CONFIG_SHELL
- SHELL="$CONFIG_SHELL"
- export SHELL
- ECHO="$CONFIG_SHELL $0 --fallback-echo"
- elif echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` &&
- test "X$echo_testing_string" = 'X\t' &&
- echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- ECHO="$CONFIG_SHELL $0 --fallback-echo"
- else
- # maybe with a smaller string...
- prev=:
-
- for cmd in 'echo test' 'sed 2q "$0"' 'sed 10q "$0"' 'sed 20q "$0"' 'sed 50q "$0"'; do
- if { test "X$echo_test_string" = "X`eval $cmd`"; } 2>/dev/null
- then
- break
- fi
- prev="$cmd"
- done
-
- if test "$prev" != 'sed 50q "$0"'; then
- echo_test_string=`eval $prev`
- export echo_test_string
- exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "$0" ${1+"$@"}
- else
- # Oops. We lost completely, so just stick with echo.
- ECHO=echo
- fi
- fi
- fi
- fi
- fi
-fi
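All of this machinery exists because, as the comment above notes, some stock echo implementations rewrite backslashes; the property the search keeps probing for boils down to:

    # a usable echo must hand the two characters \t back unchanged
    test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && echo "backslash-safe"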
-
-# Copy echo and quote the copy suitably for passing to libtool from
-# the Makefile, instead of quoting the original, which is used later.
-lt_ECHO=$ECHO
-if test "X$lt_ECHO" = "X$CONFIG_SHELL $0 --fallback-echo"; then
- lt_ECHO="$CONFIG_SHELL \\\$\$0 --fallback-echo"
-fi
-
-
-
-
-test -n "$DJDIR" || exec 7<&0 </dev/null
-exec 6>&1
-
-# Name of the host.
-# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
-# so uname gets run too.
-ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
-
-#
-# Initializations.
-#
-ac_default_prefix=/usr/local
-ac_clean_files=
-ac_config_libobj_dir=.
-LIBOBJS=
-cross_compiling=no
-subdirs=
-MFLAGS=
-MAKEFLAGS=
-
-# Identity of this package.
-PACKAGE_NAME='moses-compact-rule-table'
-PACKAGE_TARNAME='moses-compact-rule-table'
-PACKAGE_VERSION='1.0'
-PACKAGE_STRING='moses-compact-rule-table 1.0'
-PACKAGE_BUGREPORT='moses-support@mit.edu'
-PACKAGE_URL=''
-
-ac_unique_file="tools/Compactify.cpp"
-# Factoring default headers for most tests.
-ac_includes_default="\
-#include <stdio.h>
-#ifdef HAVE_SYS_TYPES_H
-# include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_STAT_H
-# include <sys/stat.h>
-#endif
-#ifdef STDC_HEADERS
-# include <stdlib.h>
-# include <stddef.h>
-#else
-# ifdef HAVE_STDLIB_H
-# include <stdlib.h>
-# endif
-#endif
-#ifdef HAVE_STRING_H
-# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
-# include <memory.h>
-# endif
-# include <string.h>
-#endif
-#ifdef HAVE_STRINGS_H
-# include <strings.h>
-#endif
-#ifdef HAVE_INTTYPES_H
-# include <inttypes.h>
-#endif
-#ifdef HAVE_STDINT_H
-# include <stdint.h>
-#endif
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif"
-
-ac_subst_vars='am__EXEEXT_FALSE
-am__EXEEXT_TRUE
-LTLIBOBJS
-LIBOBJS
-BOOST_PROGRAM_OPTIONS_LIBS
-BOOST_LDPATH
-BOOST_PROGRAM_OPTIONS_LDPATH
-BOOST_PROGRAM_OPTIONS_LDFLAGS
-BOOST_CPPFLAGS
-DISTCHECK_CONFIGURE_FLAGS
-BOOST_ROOT
-CXXCPP
-CPP
-OTOOL64
-OTOOL
-LIPO
-NMEDIT
-DSYMUTIL
-lt_ECHO
-RANLIB
-AR
-OBJDUMP
-LN_S
-NM
-ac_ct_DUMPBIN
-DUMPBIN
-LD
-FGREP
-EGREP
-GREP
-SED
-am__fastdepCC_FALSE
-am__fastdepCC_TRUE
-CCDEPMODE
-ac_ct_CC
-CFLAGS
-CC
-host_os
-host_vendor
-host_cpu
-host
-build_os
-build_vendor
-build_cpu
-build
-LIBTOOL
-am__fastdepCXX_FALSE
-am__fastdepCXX_TRUE
-CXXDEPMODE
-AMDEPBACKSLASH
-AMDEP_FALSE
-AMDEP_TRUE
-am__quote
-am__include
-DEPDIR
-OBJEXT
-EXEEXT
-ac_ct_CXX
-CPPFLAGS
-LDFLAGS
-CXXFLAGS
-CXX
-am__untar
-am__tar
-AMTAR
-am__leading_dot
-SET_MAKE
-AWK
-mkdir_p
-MKDIR_P
-INSTALL_STRIP_PROGRAM
-STRIP
-install_sh
-MAKEINFO
-AUTOHEADER
-AUTOMAKE
-AUTOCONF
-ACLOCAL
-VERSION
-PACKAGE
-CYGPATH_W
-am__isrc
-INSTALL_DATA
-INSTALL_SCRIPT
-INSTALL_PROGRAM
-target_alias
-host_alias
-build_alias
-LIBS
-ECHO_T
-ECHO_N
-ECHO_C
-DEFS
-mandir
-localedir
-libdir
-psdir
-pdfdir
-dvidir
-htmldir
-infodir
-docdir
-oldincludedir
-includedir
-localstatedir
-sharedstatedir
-sysconfdir
-datadir
-datarootdir
-libexecdir
-sbindir
-bindir
-program_transform_name
-prefix
-exec_prefix
-PACKAGE_URL
-PACKAGE_BUGREPORT
-PACKAGE_STRING
-PACKAGE_VERSION
-PACKAGE_TARNAME
-PACKAGE_NAME
-PATH_SEPARATOR
-SHELL'
-ac_subst_files=''
-ac_user_opts='
-enable_option_checking
-enable_dependency_tracking
-enable_shared
-enable_static
-with_pic
-enable_fast_install
-with_gnu_ld
-enable_libtool_lock
-with_boost
-enable_static_boost
-'
- ac_precious_vars='build_alias
-host_alias
-target_alias
-CXX
-CXXFLAGS
-LDFLAGS
-LIBS
-CPPFLAGS
-CCC
-CC
-CFLAGS
-CPP
-CXXCPP
-BOOST_ROOT'
-
-
-# Initialize some variables set by options.
-ac_init_help=
-ac_init_version=false
-ac_unrecognized_opts=
-ac_unrecognized_sep=
-# The variables have the same names as the options, with
-# dashes changed to underlines.
-cache_file=/dev/null
-exec_prefix=NONE
-no_create=
-no_recursion=
-prefix=NONE
-program_prefix=NONE
-program_suffix=NONE
-program_transform_name=s,x,x,
-silent=
-site=
-srcdir=
-verbose=
-x_includes=NONE
-x_libraries=NONE
-
-# Installation directory options.
-# These are left unexpanded so users can "make install exec_prefix=/foo"
-# and all the variables that are supposed to be based on exec_prefix
-# by default will actually change.
-# Use braces instead of parens because sh, perl, etc. also accept them.
-# (The list follows the same order as the GNU Coding Standards.)
-bindir='${exec_prefix}/bin'
-sbindir='${exec_prefix}/sbin'
-libexecdir='${exec_prefix}/libexec'
-datarootdir='${prefix}/share'
-datadir='${datarootdir}'
-sysconfdir='${prefix}/etc'
-sharedstatedir='${prefix}/com'
-localstatedir='${prefix}/var'
-includedir='${prefix}/include'
-oldincludedir='/usr/include'
-docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
-infodir='${datarootdir}/info'
-htmldir='${docdir}'
-dvidir='${docdir}'
-pdfdir='${docdir}'
-psdir='${docdir}'
-libdir='${exec_prefix}/lib'
-localedir='${datarootdir}/locale'
-mandir='${datarootdir}/man'
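Because the assignments above stay unexpanded, they keep tracking prefix and exec_prefix right up to install time, which is what makes late overrides work; the paths below are examples only:

    ./configure --prefix=/opt/compact-rule-table
    make
    make install exec_prefix=/opt/compact-rule-table-test   # bindir, libdir, ... follow the override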
-
-ac_prev=
-ac_dashdash=
-for ac_option
-do
- # If the previous option needs an argument, assign it.
- if test -n "$ac_prev"; then
- eval $ac_prev=\$ac_option
- ac_prev=
- continue
- fi
-
- case $ac_option in
- *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
- *=) ac_optarg= ;;
- *) ac_optarg=yes ;;
- esac
-
- # Accept the important Cygnus configure options, so we can diagnose typos.
-
- case $ac_dashdash$ac_option in
- --)
- ac_dashdash=yes ;;
-
- -bindir | --bindir | --bindi | --bind | --bin | --bi)
- ac_prev=bindir ;;
- -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
- bindir=$ac_optarg ;;
-
- -build | --build | --buil | --bui | --bu)
- ac_prev=build_alias ;;
- -build=* | --build=* | --buil=* | --bui=* | --bu=*)
- build_alias=$ac_optarg ;;
-
- -cache-file | --cache-file | --cache-fil | --cache-fi \
- | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
- ac_prev=cache_file ;;
- -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
- | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
- cache_file=$ac_optarg ;;
-
- --config-cache | -C)
- cache_file=config.cache ;;
-
- -datadir | --datadir | --datadi | --datad)
- ac_prev=datadir ;;
- -datadir=* | --datadir=* | --datadi=* | --datad=*)
- datadir=$ac_optarg ;;
-
- -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
- | --dataroo | --dataro | --datar)
- ac_prev=datarootdir ;;
- -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
- | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
- datarootdir=$ac_optarg ;;
-
- -disable-* | --disable-*)
- ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
- # Reject names that are not valid shell variable names.
- expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid feature name: $ac_useropt"
- ac_useropt_orig=$ac_useropt
- ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
- case $ac_user_opts in
- *"
-"enable_$ac_useropt"
-"*) ;;
- *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
- ac_unrecognized_sep=', ';;
- esac
- eval enable_$ac_useropt=no ;;
-
- -docdir | --docdir | --docdi | --doc | --do)
- ac_prev=docdir ;;
- -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
- docdir=$ac_optarg ;;
-
- -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
- ac_prev=dvidir ;;
- -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
- dvidir=$ac_optarg ;;
-
- -enable-* | --enable-*)
- ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
- # Reject names that are not valid shell variable names.
- expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid feature name: $ac_useropt"
- ac_useropt_orig=$ac_useropt
- ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
- case $ac_user_opts in
- *"
-"enable_$ac_useropt"
-"*) ;;
- *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
- ac_unrecognized_sep=', ';;
- esac
- eval enable_$ac_useropt=\$ac_optarg ;;
-
- -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
- | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
- | --exec | --exe | --ex)
- ac_prev=exec_prefix ;;
- -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
- | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
- | --exec=* | --exe=* | --ex=*)
- exec_prefix=$ac_optarg ;;
-
- -gas | --gas | --ga | --g)
- # Obsolete; use --with-gas.
- with_gas=yes ;;
-
- -help | --help | --hel | --he | -h)
- ac_init_help=long ;;
- -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
- ac_init_help=recursive ;;
- -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
- ac_init_help=short ;;
-
- -host | --host | --hos | --ho)
- ac_prev=host_alias ;;
- -host=* | --host=* | --hos=* | --ho=*)
- host_alias=$ac_optarg ;;
-
- -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
- ac_prev=htmldir ;;
- -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
- | --ht=*)
- htmldir=$ac_optarg ;;
-
- -includedir | --includedir | --includedi | --included | --include \
- | --includ | --inclu | --incl | --inc)
- ac_prev=includedir ;;
- -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
- | --includ=* | --inclu=* | --incl=* | --inc=*)
- includedir=$ac_optarg ;;
-
- -infodir | --infodir | --infodi | --infod | --info | --inf)
- ac_prev=infodir ;;
- -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
- infodir=$ac_optarg ;;
-
- -libdir | --libdir | --libdi | --libd)
- ac_prev=libdir ;;
- -libdir=* | --libdir=* | --libdi=* | --libd=*)
- libdir=$ac_optarg ;;
-
- -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
- | --libexe | --libex | --libe)
- ac_prev=libexecdir ;;
- -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
- | --libexe=* | --libex=* | --libe=*)
- libexecdir=$ac_optarg ;;
-
- -localedir | --localedir | --localedi | --localed | --locale)
- ac_prev=localedir ;;
- -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
- localedir=$ac_optarg ;;
-
- -localstatedir | --localstatedir | --localstatedi | --localstated \
- | --localstate | --localstat | --localsta | --localst | --locals)
- ac_prev=localstatedir ;;
- -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
- | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
- localstatedir=$ac_optarg ;;
-
- -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
- ac_prev=mandir ;;
- -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
- mandir=$ac_optarg ;;
-
- -nfp | --nfp | --nf)
- # Obsolete; use --without-fp.
- with_fp=no ;;
-
- -no-create | --no-create | --no-creat | --no-crea | --no-cre \
- | --no-cr | --no-c | -n)
- no_create=yes ;;
-
- -no-recursion | --no-recursion | --no-recursio | --no-recursi \
- | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
- no_recursion=yes ;;
-
- -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
- | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
- | --oldin | --oldi | --old | --ol | --o)
- ac_prev=oldincludedir ;;
- -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
- | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
- | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
- oldincludedir=$ac_optarg ;;
-
- -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
- ac_prev=prefix ;;
- -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
- prefix=$ac_optarg ;;
-
- -program-prefix | --program-prefix | --program-prefi | --program-pref \
- | --program-pre | --program-pr | --program-p)
- ac_prev=program_prefix ;;
- -program-prefix=* | --program-prefix=* | --program-prefi=* \
- | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
- program_prefix=$ac_optarg ;;
-
- -program-suffix | --program-suffix | --program-suffi | --program-suff \
- | --program-suf | --program-su | --program-s)
- ac_prev=program_suffix ;;
- -program-suffix=* | --program-suffix=* | --program-suffi=* \
- | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
- program_suffix=$ac_optarg ;;
-
- -program-transform-name | --program-transform-name \
- | --program-transform-nam | --program-transform-na \
- | --program-transform-n | --program-transform- \
- | --program-transform | --program-transfor \
- | --program-transfo | --program-transf \
- | --program-trans | --program-tran \
- | --progr-tra | --program-tr | --program-t)
- ac_prev=program_transform_name ;;
- -program-transform-name=* | --program-transform-name=* \
- | --program-transform-nam=* | --program-transform-na=* \
- | --program-transform-n=* | --program-transform-=* \
- | --program-transform=* | --program-transfor=* \
- | --program-transfo=* | --program-transf=* \
- | --program-trans=* | --program-tran=* \
- | --progr-tra=* | --program-tr=* | --program-t=*)
- program_transform_name=$ac_optarg ;;
-
- -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
- ac_prev=pdfdir ;;
- -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
- pdfdir=$ac_optarg ;;
-
- -psdir | --psdir | --psdi | --psd | --ps)
- ac_prev=psdir ;;
- -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
- psdir=$ac_optarg ;;
-
- -q | -quiet | --quiet | --quie | --qui | --qu | --q \
- | -silent | --silent | --silen | --sile | --sil)
- silent=yes ;;
-
- -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
- ac_prev=sbindir ;;
- -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
- | --sbi=* | --sb=*)
- sbindir=$ac_optarg ;;
-
- -sharedstatedir | --sharedstatedir | --sharedstatedi \
- | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
- | --sharedst | --shareds | --shared | --share | --shar \
- | --sha | --sh)
- ac_prev=sharedstatedir ;;
- -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
- | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
- | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
- | --sha=* | --sh=*)
- sharedstatedir=$ac_optarg ;;
-
- -site | --site | --sit)
- ac_prev=site ;;
- -site=* | --site=* | --sit=*)
- site=$ac_optarg ;;
-
- -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
- ac_prev=srcdir ;;
- -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
- srcdir=$ac_optarg ;;
-
- -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
- | --syscon | --sysco | --sysc | --sys | --sy)
- ac_prev=sysconfdir ;;
- -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
- | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
- sysconfdir=$ac_optarg ;;
-
- -target | --target | --targe | --targ | --tar | --ta | --t)
- ac_prev=target_alias ;;
- -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
- target_alias=$ac_optarg ;;
-
- -v | -verbose | --verbose | --verbos | --verbo | --verb)
- verbose=yes ;;
-
- -version | --version | --versio | --versi | --vers | -V)
- ac_init_version=: ;;
-
- -with-* | --with-*)
- ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
- # Reject names that are not valid shell variable names.
- expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid package name: $ac_useropt"
- ac_useropt_orig=$ac_useropt
- ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
- case $ac_user_opts in
- *"
-"with_$ac_useropt"
-"*) ;;
- *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
- ac_unrecognized_sep=', ';;
- esac
- eval with_$ac_useropt=\$ac_optarg ;;
-
- -without-* | --without-*)
- ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
- # Reject names that are not valid shell variable names.
- expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid package name: $ac_useropt"
- ac_useropt_orig=$ac_useropt
- ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
- case $ac_user_opts in
- *"
-"with_$ac_useropt"
-"*) ;;
- *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
- ac_unrecognized_sep=', ';;
- esac
- eval with_$ac_useropt=no ;;
-
- --x)
- # Obsolete; use --with-x.
- with_x=yes ;;
-
- -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
- | --x-incl | --x-inc | --x-in | --x-i)
- ac_prev=x_includes ;;
- -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
- | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
- x_includes=$ac_optarg ;;
-
- -x-libraries | --x-libraries | --x-librarie | --x-librari \
- | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
- ac_prev=x_libraries ;;
- -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
- | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
- x_libraries=$ac_optarg ;;
-
- -*) as_fn_error $? "unrecognized option: \`$ac_option'
-Try \`$0 --help' for more information"
- ;;
-
- *=*)
- ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
- # Reject names that are not valid shell variable names.
- case $ac_envvar in #(
- '' | [0-9]* | *[!_$as_cr_alnum]* )
- as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
- esac
- eval $ac_envvar=\$ac_optarg
- export $ac_envvar ;;
-
- *)
- # FIXME: should be removed in autoconf 3.0.
- $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
- expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
- $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
- : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
- ;;
-
- esac
-done
-
-if test -n "$ac_prev"; then
- ac_option=--`echo $ac_prev | sed 's/_/-/g'`
- as_fn_error $? "missing argument to $ac_option"
-fi
-
-if test -n "$ac_unrecognized_opts"; then
- case $enable_option_checking in
- no) ;;
- fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
- *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
- esac
-fi
-
-# Check all directory arguments for consistency.
-for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \
- datadir sysconfdir sharedstatedir localstatedir includedir \
- oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
- libdir localedir mandir
-do
- eval ac_val=\$$ac_var
- # Remove trailing slashes.
- case $ac_val in
- */ )
- ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
- eval $ac_var=\$ac_val;;
- esac
- # Be sure to have absolute directory names.
- case $ac_val in
- [\\/$]* | ?:[\\/]* ) continue;;
- NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
- esac
- as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
-done
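In practice the loop above means a relative directory argument is rejected before configure does any real work; for example:

    ./configure --prefix=local-install
    # configure: error: expected an absolute directory name for --prefix: local-install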
-
-# There might be people who depend on the old broken behavior: `$host'
-# used to hold the argument of --host etc.
-# FIXME: To remove some day.
-build=$build_alias
-host=$host_alias
-target=$target_alias
-
-# FIXME: To remove some day.
-if test "x$host_alias" != x; then
- if test "x$build_alias" = x; then
- cross_compiling=maybe
- $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
- If a cross compiler is detected then cross compile mode will be used" >&2
- elif test "x$build_alias" != "x$host_alias"; then
- cross_compiling=yes
- fi
-fi
-
-ac_tool_prefix=
-test -n "$host_alias" && ac_tool_prefix=$host_alias-
-
-test "$silent" = yes && exec 6>/dev/null
-
-
-ac_pwd=`pwd` && test -n "$ac_pwd" &&
-ac_ls_di=`ls -di .` &&
-ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
- as_fn_error $? "working directory cannot be determined"
-test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
- as_fn_error $? "pwd does not report name of working directory"
-
-
-# Find the source files, if location was not specified.
-if test -z "$srcdir"; then
- ac_srcdir_defaulted=yes
- # Try the directory containing this script, then the parent directory.
- ac_confdir=`$as_dirname -- "$as_myself" ||
-$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$as_myself" : 'X\(//\)[^/]' \| \
- X"$as_myself" : 'X\(//\)$' \| \
- X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_myself" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
- srcdir=$ac_confdir
- if test ! -r "$srcdir/$ac_unique_file"; then
- srcdir=..
- fi
-else
- ac_srcdir_defaulted=no
-fi
-if test ! -r "$srcdir/$ac_unique_file"; then
- test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
- as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
-fi
-ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
-ac_abs_confdir=`(
- cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
- pwd)`
-# When building in place, set srcdir=.
-if test "$ac_abs_confdir" = "$ac_pwd"; then
- srcdir=.
-fi
-# Remove unnecessary trailing slashes from srcdir.
-# Double slashes in file names in object file debugging info
-# mess up M-x gdb in Emacs.
-case $srcdir in
-*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
-esac
-for ac_var in $ac_precious_vars; do
- eval ac_env_${ac_var}_set=\${${ac_var}+set}
- eval ac_env_${ac_var}_value=\$${ac_var}
- eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
- eval ac_cv_env_${ac_var}_value=\$${ac_var}
-done
-
-#
-# Report the --help message.
-#
-if test "$ac_init_help" = "long"; then
- # Omit some internal or obsolete options to make the list less imposing.
- # This message is too long to be a string in the A/UX 3.1 sh.
- cat <<_ACEOF
-\`configure' configures moses-compact-rule-table 1.0 to adapt to many kinds of systems.
-
-Usage: $0 [OPTION]... [VAR=VALUE]...
-
-To assign environment variables (e.g., CC, CFLAGS...), specify them as
-VAR=VALUE. See below for descriptions of some of the useful variables.
-
-Defaults for the options are specified in brackets.
-
-Configuration:
- -h, --help display this help and exit
- --help=short display options specific to this package
- --help=recursive display the short help of all the included packages
- -V, --version display version information and exit
- -q, --quiet, --silent do not print \`checking ...' messages
- --cache-file=FILE cache test results in FILE [disabled]
- -C, --config-cache alias for \`--cache-file=config.cache'
- -n, --no-create do not create output files
- --srcdir=DIR find the sources in DIR [configure dir or \`..']
-
-Installation directories:
- --prefix=PREFIX install architecture-independent files in PREFIX
- [$ac_default_prefix]
- --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX
- [PREFIX]
-
-By default, \`make install' will install all the files in
-\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify
-an installation prefix other than \`$ac_default_prefix' using \`--prefix',
-for instance \`--prefix=\$HOME'.
-
-For better control, use the options below.
-
-Fine tuning of the installation directories:
- --bindir=DIR user executables [EPREFIX/bin]
- --sbindir=DIR system admin executables [EPREFIX/sbin]
- --libexecdir=DIR program executables [EPREFIX/libexec]
- --sysconfdir=DIR read-only single-machine data [PREFIX/etc]
- --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com]
- --localstatedir=DIR modifiable single-machine data [PREFIX/var]
- --libdir=DIR object code libraries [EPREFIX/lib]
- --includedir=DIR C header files [PREFIX/include]
- --oldincludedir=DIR C header files for non-gcc [/usr/include]
- --datarootdir=DIR read-only arch.-independent data root [PREFIX/share]
- --datadir=DIR read-only architecture-independent data [DATAROOTDIR]
- --infodir=DIR info documentation [DATAROOTDIR/info]
- --localedir=DIR locale-dependent data [DATAROOTDIR/locale]
- --mandir=DIR man documentation [DATAROOTDIR/man]
- --docdir=DIR documentation root
- [DATAROOTDIR/doc/moses-compact-rule-table]
- --htmldir=DIR html documentation [DOCDIR]
- --dvidir=DIR dvi documentation [DOCDIR]
- --pdfdir=DIR pdf documentation [DOCDIR]
- --psdir=DIR ps documentation [DOCDIR]
-_ACEOF
-
- cat <<\_ACEOF
-
-Program names:
- --program-prefix=PREFIX prepend PREFIX to installed program names
- --program-suffix=SUFFIX append SUFFIX to installed program names
- --program-transform-name=PROGRAM run sed PROGRAM on installed program names
-
-System types:
- --build=BUILD configure for building on BUILD [guessed]
- --host=HOST cross-compile to build programs to run on HOST [BUILD]
-_ACEOF
-fi
-
-if test -n "$ac_init_help"; then
- case $ac_init_help in
- short | recursive ) echo "Configuration of moses-compact-rule-table 1.0:";;
- esac
- cat <<\_ACEOF
-
-Optional Features:
- --disable-option-checking ignore unrecognized --enable/--with options
- --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no)
- --enable-FEATURE[=ARG] include FEATURE [ARG=yes]
- --disable-dependency-tracking speeds up one-time build
- --enable-dependency-tracking do not reject slow dependency extractors
- --enable-shared[=PKGS] build shared libraries [default=yes]
- --enable-static[=PKGS] build static libraries [default=yes]
- --enable-fast-install[=PKGS]
- optimize for fast installation [default=yes]
- --disable-libtool-lock avoid locking (might break parallel builds)
- --enable-static-boost Prefer the static boost libraries over the shared
- ones [no]
-
-Optional Packages:
- --with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
- --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no)
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
- --with-boost=DIR prefix of Boost [guess]
-
-Some influential environment variables:
- CXX C++ compiler command
- CXXFLAGS C++ compiler flags
- LDFLAGS linker flags, e.g. -L<lib dir> if you have libraries in a
- nonstandard directory <lib dir>
- LIBS libraries to pass to the linker, e.g. -l<library>
- CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
- you have headers in a nonstandard directory <include dir>
- CC C compiler command
- CFLAGS C compiler flags
- CPP C preprocessor
- CXXCPP C++ preprocessor
- BOOST_ROOT Location of Boost installation
-
-Use these variables to override the choices made by `configure' or to help
-it to find libraries and programs with nonstandard names/locations.
-
-Report bugs to <moses-support@mit.edu>.
-_ACEOF
-ac_status=$?
-fi
-
-if test "$ac_init_help" = "recursive"; then
- # If there are subdirs, report their specific --help.
- for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
- test -d "$ac_dir" ||
- { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
- continue
- ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
- ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
- # A ".." for each directory in $ac_dir_suffix.
- ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
- case $ac_top_builddir_sub in
- "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
- *) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
- esac ;;
-esac
-ac_abs_top_builddir=$ac_pwd
-ac_abs_builddir=$ac_pwd$ac_dir_suffix
-# for backward compatibility:
-ac_top_builddir=$ac_top_build_prefix
-
-case $srcdir in
- .) # We are building in place.
- ac_srcdir=.
- ac_top_srcdir=$ac_top_builddir_sub
- ac_abs_top_srcdir=$ac_pwd ;;
- [\\/]* | ?:[\\/]* ) # Absolute name.
- ac_srcdir=$srcdir$ac_dir_suffix;
- ac_top_srcdir=$srcdir
- ac_abs_top_srcdir=$srcdir ;;
- *) # Relative name.
- ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
- ac_top_srcdir=$ac_top_build_prefix$srcdir
- ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
-esac
-ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
-
- cd "$ac_dir" || { ac_status=$?; continue; }
- # Check for guested configure.
- if test -f "$ac_srcdir/configure.gnu"; then
- echo &&
- $SHELL "$ac_srcdir/configure.gnu" --help=recursive
- elif test -f "$ac_srcdir/configure"; then
- echo &&
- $SHELL "$ac_srcdir/configure" --help=recursive
- else
- $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
- fi || ac_status=$?
- cd "$ac_pwd" || { ac_status=$?; break; }
- done
-fi
-
-test -n "$ac_init_help" && exit $ac_status
-if $ac_init_version; then
- cat <<\_ACEOF
-moses-compact-rule-table configure 1.0
-generated by GNU Autoconf 2.68
-
-Copyright (C) 2010 Free Software Foundation, Inc.
-This configure script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it.
-_ACEOF
- exit
-fi
-
-## ------------------------ ##
-## Autoconf initialization. ##
-## ------------------------ ##
-
-# ac_fn_cxx_try_compile LINENO
-# ----------------------------
-# Try to compile conftest.$ac_ext, and return whether this succeeded.
-ac_fn_cxx_try_compile ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- rm -f conftest.$ac_objext
- if { { ac_try="$ac_compile"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_compile") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && {
- test -z "$ac_cxx_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest.$ac_objext; then :
- ac_retval=0
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=1
-fi
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_cxx_try_compile
-
-# ac_fn_c_try_compile LINENO
-# --------------------------
-# Try to compile conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_compile ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- rm -f conftest.$ac_objext
- if { { ac_try="$ac_compile"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_compile") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && {
- test -z "$ac_c_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest.$ac_objext; then :
- ac_retval=0
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=1
-fi
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_compile
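The try_* helpers above are designed to sit directly in the condition of an if, with the caller's $LINENO passed through so failures land in the log. A sketch of the calling pattern; the test program and the my_feature_ok variable are made up for illustration, the script elsewhere arranges for ac_ext and ac_compile to be set, and real checks prepend confdefs.h to the program first:

    echo 'int main () { return 0; }' >conftest.$ac_ext
    if ac_fn_c_try_compile "$LINENO"; then :
      my_feature_ok=yes
    else
      my_feature_ok=no
    fi
    rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext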
-
-# ac_fn_c_try_link LINENO
-# -----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_link ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- rm -f conftest.$ac_objext conftest$ac_exeext
- if { { ac_try="$ac_link"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && {
- test -z "$ac_c_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest$ac_exeext && {
- test "$cross_compiling" = yes ||
- $as_test_x conftest$ac_exeext
- }; then :
- ac_retval=0
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=1
-fi
- # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
- # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
- # interfere with the next link command; also delete a directory that is
- # left behind by Apple's compiler. We do this before executing the actions.
- rm -rf conftest.dSYM conftest_ipa8_conftest.oo
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_link
-
-# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
-# -------------------------------------------------------
-# Tests whether HEADER exists and can be compiled using the include files in
-# INCLUDES, setting the cache variable VAR accordingly.
-ac_fn_c_check_header_compile ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$4
-#include <$2>
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
- eval "$3=yes"
-else
- eval "$3=no"
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-eval ac_res=\$$3
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_header_compile
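A representative call, following the LINENO HEADER VAR INCLUDES convention documented above; the specific header and define are only an example of the pattern the rest of the script uses:

    ac_fn_c_check_header_compile "$LINENO" "inttypes.h" "ac_cv_header_inttypes_h" "$ac_includes_default"
    if test "x$ac_cv_header_inttypes_h" = xyes; then :
      $as_echo "#define HAVE_INTTYPES_H 1" >>confdefs.h
    fi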
-
-# ac_fn_c_try_cpp LINENO
-# ----------------------
-# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_cpp ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- if { { ac_try="$ac_cpp conftest.$ac_ext"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } > conftest.i && {
- test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
- test ! -s conftest.err
- }; then :
- ac_retval=0
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=1
-fi
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_cpp
-
-# ac_fn_c_try_run LINENO
-# ----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes
-# that executables *can* be run.
-ac_fn_c_try_run ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- if { { ac_try="$ac_link"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
- { { case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_try") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; }; then :
- ac_retval=0
-else
- $as_echo "$as_me: program exited with status $ac_status" >&5
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=$ac_status
-fi
- rm -rf conftest.dSYM conftest_ipa8_conftest.oo
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_run
-
-# ac_fn_c_check_func LINENO FUNC VAR
-# ----------------------------------
-# Tests whether FUNC exists, setting the cache variable VAR accordingly
-ac_fn_c_check_func ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
- For example, HP-UX 11i <limits.h> declares gettimeofday. */
-#define $2 innocuous_$2
-
-/* System header to define __stub macros and hopefully few prototypes,
- which can conflict with char $2 (); below.
- Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
- <limits.h> exists even on freestanding compilers. */
-
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-
-#undef $2
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char $2 ();
-/* The GNU C library defines this for functions which it implements
- to always fail with ENOSYS. Some functions are actually named
- something starting with __ and the normal name is an alias. */
-#if defined __stub_$2 || defined __stub___$2
-choke me
-#endif
-
-int
-main ()
-{
-return $2 ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- eval "$3=yes"
-else
- eval "$3=no"
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-fi
-eval ac_res=\$$3
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_func
-
-# ac_fn_cxx_try_cpp LINENO
-# ------------------------
-# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
-ac_fn_cxx_try_cpp ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- if { { ac_try="$ac_cpp conftest.$ac_ext"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } > conftest.i && {
- test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" ||
- test ! -s conftest.err
- }; then :
- ac_retval=0
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=1
-fi
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_cxx_try_cpp
-
-# ac_fn_cxx_try_link LINENO
-# -------------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_cxx_try_link ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- rm -f conftest.$ac_objext conftest$ac_exeext
- if { { ac_try="$ac_link"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && {
- test -z "$ac_cxx_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest$ac_exeext && {
- test "$cross_compiling" = yes ||
- $as_test_x conftest$ac_exeext
- }; then :
- ac_retval=0
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- ac_retval=1
-fi
- # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
- # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
- # interfere with the next link command; also delete a directory that is
- # left behind by Apple's compiler. We do this before executing the actions.
- rm -rf conftest.dSYM conftest_ipa8_conftest.oo
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
- as_fn_set_status $ac_retval
-
-} # ac_fn_cxx_try_link
-
-# ac_fn_cxx_check_header_mongrel LINENO HEADER VAR INCLUDES
-# ---------------------------------------------------------
-# Tests whether HEADER exists, giving a warning if it cannot be compiled using
-# the include files in INCLUDES and setting the cache variable VAR
-# accordingly.
-ac_fn_cxx_check_header_mongrel ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- if eval \${$3+:} false; then :
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
- $as_echo_n "(cached) " >&6
-fi
-eval ac_res=\$$3
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-else
- # Is the header compilable?
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5
-$as_echo_n "checking $2 usability... " >&6; }
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$4
-#include <$2>
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_header_compiler=yes
-else
- ac_header_compiler=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5
-$as_echo "$ac_header_compiler" >&6; }
-
-# Is the header present?
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5
-$as_echo_n "checking $2 presence... " >&6; }
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <$2>
-_ACEOF
-if ac_fn_cxx_try_cpp "$LINENO"; then :
- ac_header_preproc=yes
-else
- ac_header_preproc=no
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5
-$as_echo "$ac_header_preproc" >&6; }
-
-# So? What about this header?
-case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in #((
- yes:no: )
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5
-$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
-$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
- ;;
- no:yes:* )
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5
-$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5
-$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5
-$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5
-$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
-$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-( $as_echo "## ------------------------------------ ##
-## Report this to moses-support@mit.edu ##
-## ------------------------------------ ##"
- ) | sed "s/^/$as_me: WARNING: /" >&2
- ;;
-esac
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- eval "$3=\$ac_header_compiler"
-fi
-eval ac_res=\$$3
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-fi
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_cxx_check_header_mongrel
-
-# ac_fn_cxx_check_type LINENO TYPE VAR INCLUDES
-# ---------------------------------------------
-# Tests whether TYPE exists after having included INCLUDES, setting cache
-# variable VAR accordingly.
-ac_fn_cxx_check_type ()
-{
- as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- eval "$3=no"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$4
-int
-main ()
-{
-if (sizeof ($2))
- return 0;
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$4
-int
-main ()
-{
-if (sizeof (($2)))
- return 0;
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
-
-else
- eval "$3=yes"
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-eval ac_res=\$$3
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
- eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_cxx_check_type
-cat >config.log <<_ACEOF
-This file contains any messages produced by compilers while
-running configure, to aid debugging if configure makes a mistake.
-
-It was created by moses-compact-rule-table $as_me 1.0, which was
-generated by GNU Autoconf 2.68. Invocation command line was
-
- $ $0 $@
-
-_ACEOF
-exec 5>>config.log
-{
-cat <<_ASUNAME
-## --------- ##
-## Platform. ##
-## --------- ##
-
-hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
-uname -m = `(uname -m) 2>/dev/null || echo unknown`
-uname -r = `(uname -r) 2>/dev/null || echo unknown`
-uname -s = `(uname -s) 2>/dev/null || echo unknown`
-uname -v = `(uname -v) 2>/dev/null || echo unknown`
-
-/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
-/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown`
-
-/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown`
-/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown`
-/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
-/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown`
-/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown`
-/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown`
-/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown`
-
-_ASUNAME
-
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- $as_echo "PATH: $as_dir"
- done
-IFS=$as_save_IFS
-
-} >&5
-
-cat >&5 <<_ACEOF
-
-
-## ----------- ##
-## Core tests. ##
-## ----------- ##
-
-_ACEOF
-
-
-# Keep a trace of the command line.
-# Strip out --no-create and --no-recursion so they do not pile up.
-# Strip out --silent because we don't want to record it for future runs.
-# Also quote any args containing shell meta-characters.
-# Make two passes to allow for proper duplicate-argument suppression.
-ac_configure_args=
-ac_configure_args0=
-ac_configure_args1=
-ac_must_keep_next=false
-for ac_pass in 1 2
-do
- for ac_arg
- do
- case $ac_arg in
- -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
- -q | -quiet | --quiet | --quie | --qui | --qu | --q \
- | -silent | --silent | --silen | --sile | --sil)
- continue ;;
- *\'*)
- ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
- esac
- case $ac_pass in
- 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
- 2)
- as_fn_append ac_configure_args1 " '$ac_arg'"
- if test $ac_must_keep_next = true; then
- ac_must_keep_next=false # Got value, back to normal.
- else
- case $ac_arg in
- *=* | --config-cache | -C | -disable-* | --disable-* \
- | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
- | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
- | -with-* | --with-* | -without-* | --without-* | --x)
- case "$ac_configure_args0 " in
- "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
- esac
- ;;
- -* ) ac_must_keep_next=true ;;
- esac
- fi
- as_fn_append ac_configure_args " '$ac_arg'"
- ;;
- esac
- done
-done
-{ ac_configure_args0=; unset ac_configure_args0;}
-{ ac_configure_args1=; unset ac_configure_args1;}
-
-# When interrupted or exit'd, cleanup temporary files, and complete
-# config.log. We remove comments because anyway the quotes in there
-# would cause problems or look ugly.
-# WARNING: Use '\'' to represent an apostrophe within the trap.
-# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
-trap 'exit_status=$?
- # Save into config.log some information that might help in debugging.
- {
- echo
-
- $as_echo "## ---------------- ##
-## Cache variables. ##
-## ---------------- ##"
- echo
- # The following way of writing the cache mishandles newlines in values,
-(
- for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
- eval ac_val=\$$ac_var
- case $ac_val in #(
- *${as_nl}*)
- case $ac_var in #(
- *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
- esac
- case $ac_var in #(
- _ | IFS | as_nl) ;; #(
- BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
- *) { eval $ac_var=; unset $ac_var;} ;;
- esac ;;
- esac
- done
- (set) 2>&1 |
- case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
- *${as_nl}ac_space=\ *)
- sed -n \
- "s/'\''/'\''\\\\'\'''\''/g;
- s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
- ;; #(
- *)
- sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
- ;;
- esac |
- sort
-)
- echo
-
- $as_echo "## ----------------- ##
-## Output variables. ##
-## ----------------- ##"
- echo
- for ac_var in $ac_subst_vars
- do
- eval ac_val=\$$ac_var
- case $ac_val in
- *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
- esac
- $as_echo "$ac_var='\''$ac_val'\''"
- done | sort
- echo
-
- if test -n "$ac_subst_files"; then
- $as_echo "## ------------------- ##
-## File substitutions. ##
-## ------------------- ##"
- echo
- for ac_var in $ac_subst_files
- do
- eval ac_val=\$$ac_var
- case $ac_val in
- *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
- esac
- $as_echo "$ac_var='\''$ac_val'\''"
- done | sort
- echo
- fi
-
- if test -s confdefs.h; then
- $as_echo "## ----------- ##
-## confdefs.h. ##
-## ----------- ##"
- echo
- cat confdefs.h
- echo
- fi
- test "$ac_signal" != 0 &&
- $as_echo "$as_me: caught signal $ac_signal"
- $as_echo "$as_me: exit $exit_status"
- } >&5
- rm -f core *.core core.conftest.* &&
- rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
- exit $exit_status
-' 0
-for ac_signal in 1 2 13 15; do
- trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
-done
-ac_signal=0
-
-# confdefs.h avoids OS command line length limits that DEFS can exceed.
-rm -f -r conftest* confdefs.h
-
-$as_echo "/* confdefs.h */" > confdefs.h
-
-# Predefined preprocessor variables.
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_NAME "$PACKAGE_NAME"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_VERSION "$PACKAGE_VERSION"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_STRING "$PACKAGE_STRING"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_URL "$PACKAGE_URL"
-_ACEOF
-
-
-# Let the site file select an alternate cache file if it wants to.
-# Prefer an explicitly selected file to automatically selected ones.
-ac_site_file1=NONE
-ac_site_file2=NONE
-if test -n "$CONFIG_SITE"; then
- # We do not want a PATH search for config.site.
- case $CONFIG_SITE in #((
- -*) ac_site_file1=./$CONFIG_SITE;;
- */*) ac_site_file1=$CONFIG_SITE;;
- *) ac_site_file1=./$CONFIG_SITE;;
- esac
-elif test "x$prefix" != xNONE; then
- ac_site_file1=$prefix/share/config.site
- ac_site_file2=$prefix/etc/config.site
-else
- ac_site_file1=$ac_default_prefix/share/config.site
- ac_site_file2=$ac_default_prefix/etc/config.site
-fi
-for ac_site_file in "$ac_site_file1" "$ac_site_file2"
-do
- test "x$ac_site_file" = xNONE && continue
- if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
-$as_echo "$as_me: loading site script $ac_site_file" >&6;}
- sed 's/^/| /' "$ac_site_file" >&5
- . "$ac_site_file" \
- || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "failed to load site script $ac_site_file
-See \`config.log' for more details" "$LINENO" 5; }
- fi
-done
-
-if test -r "$cache_file"; then
- # Some versions of bash will fail to source /dev/null (special files
- # actually), so we avoid doing that. DJGPP emulates it as a regular file.
- if test /dev/null != "$cache_file" && test -f "$cache_file"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
-$as_echo "$as_me: loading cache $cache_file" >&6;}
- case $cache_file in
- [\\/]* | ?:[\\/]* ) . "$cache_file";;
- *) . "./$cache_file";;
- esac
- fi
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
-$as_echo "$as_me: creating cache $cache_file" >&6;}
- >$cache_file
-fi
-
-# Check that the precious variables saved in the cache have kept the same
-# value.
-ac_cache_corrupted=false
-for ac_var in $ac_precious_vars; do
- eval ac_old_set=\$ac_cv_env_${ac_var}_set
- eval ac_new_set=\$ac_env_${ac_var}_set
- eval ac_old_val=\$ac_cv_env_${ac_var}_value
- eval ac_new_val=\$ac_env_${ac_var}_value
- case $ac_old_set,$ac_new_set in
- set,)
- { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
-$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
- ac_cache_corrupted=: ;;
- ,set)
- { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
-$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
- ac_cache_corrupted=: ;;
- ,);;
- *)
- if test "x$ac_old_val" != "x$ac_new_val"; then
- # differences in whitespace do not lead to failure.
- ac_old_val_w=`echo x $ac_old_val`
- ac_new_val_w=`echo x $ac_new_val`
- if test "$ac_old_val_w" != "$ac_new_val_w"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
-$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
- ac_cache_corrupted=:
- else
- { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
-$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
- eval $ac_var=\$ac_old_val
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5
-$as_echo "$as_me: former value: \`$ac_old_val'" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5
-$as_echo "$as_me: current value: \`$ac_new_val'" >&2;}
- fi;;
- esac
- # Pass precious variables to config.status.
- if test "$ac_new_set" = set; then
- case $ac_new_val in
- *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
- *) ac_arg=$ac_var=$ac_new_val ;;
- esac
- case " $ac_configure_args " in
- *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy.
- *) as_fn_append ac_configure_args " '$ac_arg'" ;;
- esac
- fi
-done
-if $ac_cache_corrupted; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
- { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
-$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
- as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
-fi
-## -------------------- ##
-## Main body of script. ##
-## -------------------- ##
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-am__api_version='1.11'
-
-ac_aux_dir=
-for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do
- if test -f "$ac_dir/install-sh"; then
- ac_aux_dir=$ac_dir
- ac_install_sh="$ac_aux_dir/install-sh -c"
- break
- elif test -f "$ac_dir/install.sh"; then
- ac_aux_dir=$ac_dir
- ac_install_sh="$ac_aux_dir/install.sh -c"
- break
- elif test -f "$ac_dir/shtool"; then
- ac_aux_dir=$ac_dir
- ac_install_sh="$ac_aux_dir/shtool install -c"
- break
- fi
-done
-if test -z "$ac_aux_dir"; then
- as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
-fi
-
-# These three variables are undocumented and unsupported,
-# and are intended to be withdrawn in a future Autoconf release.
-# They can cause serious problems if a builder's source tree is in a directory
-# whose full name contains unusual characters.
-ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var.
-ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var.
-ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var.
-
-
-# Find a good install program. We prefer a C program (faster),
-# so one script is as good as another. But avoid the broken or
-# incompatible versions:
-# SysV /etc/install, /usr/sbin/install
-# SunOS /usr/etc/install
-# IRIX /sbin/install
-# AIX /bin/install
-# AmigaOS /C/install, which installs bootblocks on floppy discs
-# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
-# AFS /usr/afsws/bin/install, which mishandles nonexistent args
-# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
-# OS/2's system install, which has a completely different semantic
-# ./install, which can be erroneously created by make from ./install.sh.
-# Reject install programs that cannot install multiple files.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
-$as_echo_n "checking for a BSD-compatible install... " >&6; }
-if test -z "$INSTALL"; then
-if ${ac_cv_path_install+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- # Account for people who put trailing slashes in PATH elements.
-case $as_dir/ in #((
- ./ | .// | /[cC]/* | \
- /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
- ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
- /usr/ucb/* ) ;;
- *)
- # OSF1 and SCO ODT 3.0 have their own names for install.
- # Don't use installbsd from OSF since it installs stuff as root
- # by default.
- for ac_prog in ginstall scoinst install; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then
- if test $ac_prog = install &&
- grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
- # AIX install. It has an incompatible calling convention.
- :
- elif test $ac_prog = install &&
- grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
- # program-specific install script used by HP pwplus--don't use.
- :
- else
- rm -rf conftest.one conftest.two conftest.dir
- echo one > conftest.one
- echo two > conftest.two
- mkdir conftest.dir
- if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
- test -s conftest.one && test -s conftest.two &&
- test -s conftest.dir/conftest.one &&
- test -s conftest.dir/conftest.two
- then
- ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
- break 3
- fi
- fi
- fi
- done
- done
- ;;
-esac
-
- done
-IFS=$as_save_IFS
-
-rm -rf conftest.one conftest.two conftest.dir
-
-fi
- if test "${ac_cv_path_install+set}" = set; then
- INSTALL=$ac_cv_path_install
- else
- # As a last resort, use the slow shell script. Don't cache a
- # value for INSTALL within a source directory, because that will
- # break other packages using the cache if that directory is
- # removed, or if the value is a relative name.
- INSTALL=$ac_install_sh
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
-$as_echo "$INSTALL" >&6; }
-
-# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
-# It thinks the first close brace ends the variable substitution.
-test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
-
-test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
-
-test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
-$as_echo_n "checking whether build environment is sane... " >&6; }
-# Just in case
-sleep 1
-echo timestamp > conftest.file
-# Reject unsafe characters in $srcdir or the absolute working directory
-# name. Accept space and tab only in the latter.
-am_lf='
-'
-case `pwd` in
- *[\\\"\#\$\&\'\`$am_lf]*)
- as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;;
-esac
-case $srcdir in
- *[\\\"\#\$\&\'\`$am_lf\ \ ]*)
- as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;;
-esac
-
-# Do `set' in a subshell so we don't clobber the current shell's
-# arguments. Must try -L first in case configure is actually a
-# symlink; some systems play weird games with the mod time of symlinks
-# (eg FreeBSD returns the mod time of the symlink's containing
-# directory).
-if (
- set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
- if test "$*" = "X"; then
- # -L didn't work.
- set X `ls -t "$srcdir/configure" conftest.file`
- fi
- rm -f conftest.file
- if test "$*" != "X $srcdir/configure conftest.file" \
- && test "$*" != "X conftest.file $srcdir/configure"; then
-
- # If neither matched, then we have a broken ls. This can happen
- # if, for instance, CONFIG_SHELL is bash and it inherits a
- # broken ls alias from the environment. This has actually
- # happened. Such a system could not be considered "sane".
- as_fn_error $? "ls -t appears to fail. Make sure there is not a broken
-alias in your environment" "$LINENO" 5
- fi
-
- test "$2" = conftest.file
- )
-then
- # Ok.
- :
-else
- as_fn_error $? "newly created file is older than distributed files!
-Check your system clock" "$LINENO" 5
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-test "$program_prefix" != NONE &&
- program_transform_name="s&^&$program_prefix&;$program_transform_name"
-# Use a double $ so make ignores it.
-test "$program_suffix" != NONE &&
- program_transform_name="s&\$&$program_suffix&;$program_transform_name"
-# Double any \ or $.
-# By default was `s,x,x', remove it if useless.
-ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
-program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
-
-# expand $ac_aux_dir to an absolute path
-am_aux_dir=`cd $ac_aux_dir && pwd`
-
-if test x"${MISSING+set}" != xset; then
- case $am_aux_dir in
- *\ * | *\ *)
- MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
- *)
- MISSING="\${SHELL} $am_aux_dir/missing" ;;
- esac
-fi
-# Use eval to expand $SHELL
-if eval "$MISSING --run true"; then
- am_missing_run="$MISSING --run "
-else
- am_missing_run=
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5
-$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;}
-fi
-
-if test x"${install_sh}" != xset; then
- case $am_aux_dir in
- *\ * | *\ *)
- install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
- *)
- install_sh="\${SHELL} $am_aux_dir/install-sh"
- esac
-fi
-
-# Installed binaries are usually stripped using `strip' when the user
-# run `make install-strip'. However `strip' might not be the right
-# tool to use in cross-compilation environments, therefore Automake
-# will honor the `STRIP' environment variable to overrule this program.
-if test "$cross_compiling" != no; then
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-set dummy ${ac_tool_prefix}strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$STRIP"; then
- ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_STRIP="${ac_tool_prefix}strip"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-STRIP=$ac_cv_prog_STRIP
-if test -n "$STRIP"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
-$as_echo "$STRIP" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_STRIP"; then
- ac_ct_STRIP=$STRIP
- # Extract the first word of "strip", so it can be a program name with args.
-set dummy strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_STRIP"; then
- ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_STRIP="strip"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
-if test -n "$ac_ct_STRIP"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
-$as_echo "$ac_ct_STRIP" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_STRIP" = x; then
- STRIP=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- STRIP=$ac_ct_STRIP
- fi
-else
- STRIP="$ac_cv_prog_STRIP"
-fi
-
-fi
-INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
-$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
-if test -z "$MKDIR_P"; then
- if ${ac_cv_path_mkdir+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_prog in mkdir gmkdir; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue
- case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
- 'mkdir (GNU coreutils) '* | \
- 'mkdir (coreutils) '* | \
- 'mkdir (fileutils) '4.1*)
- ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
- break 3;;
- esac
- done
- done
- done
-IFS=$as_save_IFS
-
-fi
-
- test -d ./--version && rmdir ./--version
- if test "${ac_cv_path_mkdir+set}" = set; then
- MKDIR_P="$ac_cv_path_mkdir -p"
- else
- # As a last resort, use the slow shell script. Don't cache a
- # value for MKDIR_P within a source directory, because that will
- # break other packages using the cache if that directory is
- # removed, or if the value is a relative name.
- MKDIR_P="$ac_install_sh -d"
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
-$as_echo "$MKDIR_P" >&6; }
-
-mkdir_p="$MKDIR_P"
-case $mkdir_p in
- [\\/$]* | ?:[\\/]*) ;;
- */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
-esac
-
-for ac_prog in gawk mawk nawk awk
-do
- # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AWK+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$AWK"; then
- ac_cv_prog_AWK="$AWK" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_AWK="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-AWK=$ac_cv_prog_AWK
-if test -n "$AWK"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
-$as_echo "$AWK" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$AWK" && break
-done
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
-$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
-set x ${MAKE-make}
-ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
-if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat >conftest.make <<\_ACEOF
-SHELL = /bin/sh
-all:
- @echo '@@@%%%=$(MAKE)=@@@%%%'
-_ACEOF
-# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
-case `${MAKE-make} -f conftest.make 2>/dev/null` in
- *@@@%%%=?*=@@@%%%*)
- eval ac_cv_prog_make_${ac_make}_set=yes;;
- *)
- eval ac_cv_prog_make_${ac_make}_set=no;;
-esac
-rm -f conftest.make
-fi
-if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
- SET_MAKE=
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
- SET_MAKE="MAKE=${MAKE-make}"
-fi
-
-rm -rf .tst 2>/dev/null
-mkdir .tst 2>/dev/null
-if test -d .tst; then
- am__leading_dot=.
-else
- am__leading_dot=_
-fi
-rmdir .tst 2>/dev/null
-
-if test "`cd $srcdir && pwd`" != "`pwd`"; then
- # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
- # is not polluted with repeated "-I."
- am__isrc=' -I$(srcdir)'
- # test to see if srcdir already configured
- if test -f $srcdir/config.status; then
- as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
- fi
-fi
-
-# test whether we have cygpath
-if test -z "$CYGPATH_W"; then
- if (cygpath --version) >/dev/null 2>/dev/null; then
- CYGPATH_W='cygpath -w'
- else
- CYGPATH_W=echo
- fi
-fi
-
-
-# Define the identity of the package.
- PACKAGE='moses-compact-rule-table'
- VERSION='1.0'
-
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE "$PACKAGE"
-_ACEOF
-
-
-cat >>confdefs.h <<_ACEOF
-#define VERSION "$VERSION"
-_ACEOF
-
-# Some tools Automake needs.
-
-ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
-
-
-AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
-
-
-AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
-
-
-AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
-
-
-MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
-
-# We need awk for the "check" target. The system "awk" is bad on
-# some platforms.
-# Always define AMTAR for backward compatibility.
-
-AMTAR=${AMTAR-"${am_missing_run}tar"}
-
-am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'
-
-
-
-
-
-
-ac_config_headers="$ac_config_headers config.h"
-
-
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-
-# Checks for programs.
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-if test -z "$CXX"; then
- if test -n "$CCC"; then
- CXX=$CCC
- else
- if test -n "$ac_tool_prefix"; then
- for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
- do
- # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$CXX"; then
- ac_cv_prog_CXX="$CXX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_CXX="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-CXX=$ac_cv_prog_CXX
-if test -n "$CXX"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5
-$as_echo "$CXX" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$CXX" && break
- done
-fi
-if test -z "$CXX"; then
- ac_ct_CXX=$CXX
- for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
-do
- # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_CXX"; then
- ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_CXX="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CXX=$ac_cv_prog_ac_ct_CXX
-if test -n "$ac_ct_CXX"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5
-$as_echo "$ac_ct_CXX" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$ac_ct_CXX" && break
-done
-
- if test "x$ac_ct_CXX" = x; then
- CXX="g++"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- CXX=$ac_ct_CXX
- fi
-fi
-
- fi
-fi
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
- { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_compiler $ac_option >&5") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- sed '10a\
-... rest of stderr output deleted ...
- 10q' conftest.err >conftest.er1
- cat conftest.er1 >&5
- fi
- rm -f conftest.er1 conftest.err
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
-done
-
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
-# Try to create an executable without -o first, disregard a.out.
-# It will help us diagnose broken compilers, and finding out an intuition
-# of exeext.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C++ compiler works" >&5
-$as_echo_n "checking whether the C++ compiler works... " >&6; }
-ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
-
-# The possible output files:
-ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
-
-ac_rmfiles=
-for ac_file in $ac_files
-do
- case $ac_file in
- *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
- * ) ac_rmfiles="$ac_rmfiles $ac_file";;
- esac
-done
-rm -f $ac_rmfiles
-
-if { { ac_try="$ac_link_default"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link_default") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then :
- # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
-# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
-# in a Makefile. We should not override ac_cv_exeext if it was cached,
-# so that the user can short-circuit this test for compilers unknown to
-# Autoconf.
-for ac_file in $ac_files ''
-do
- test -f "$ac_file" || continue
- case $ac_file in
- *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
- ;;
- [ab].out )
- # We found the default executable, but exeext='' is most
- # certainly right.
- break;;
- *.* )
- if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
- then :; else
- ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
- fi
- # We set ac_cv_exeext here because the later test for it is not
- # safe: cross compilers may not add the suffix if given an `-o'
- # argument, so we may need to know it at that point already.
- # Even if this section looks crufty: it has the advantage of
- # actually working.
- break;;
- * )
- break;;
- esac
-done
-test "$ac_cv_exeext" = no && ac_cv_exeext=
-
-else
- ac_file=''
-fi
-if test -z "$ac_file"; then :
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-$as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "C++ compiler cannot create executables
-See \`config.log' for more details" "$LINENO" 5; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler default output file name" >&5
-$as_echo_n "checking for C++ compiler default output file name... " >&6; }
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
-$as_echo "$ac_file" >&6; }
-ac_exeext=$ac_cv_exeext
-
-rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
-ac_clean_files=$ac_clean_files_save
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
-$as_echo_n "checking for suffix of executables... " >&6; }
-if { { ac_try="$ac_link"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then :
- # If both `conftest.exe' and `conftest' are `present' (well, observable)
-# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will
-# work properly (i.e., refer to `conftest.exe'), while it won't with
-# `rm'.
-for ac_file in conftest.exe conftest conftest.*; do
- test -f "$ac_file" || continue
- case $ac_file in
- *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
- *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
- break;;
- * ) break;;
- esac
-done
-else
- { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-rm -f conftest conftest$ac_cv_exeext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
-$as_echo "$ac_cv_exeext" >&6; }
-
-rm -f conftest.$ac_ext
-EXEEXT=$ac_cv_exeext
-ac_exeext=$EXEEXT
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <stdio.h>
-int
-main ()
-{
-FILE *f = fopen ("conftest.out", "w");
- return ferror (f) || fclose (f) != 0;
-
- ;
- return 0;
-}
-_ACEOF
-ac_clean_files="$ac_clean_files conftest.out"
-# Check that the compiler produces executables we can run. If not, either
-# the compiler is broken, or we cross compile.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
-$as_echo_n "checking whether we are cross compiling... " >&6; }
-if test "$cross_compiling" != yes; then
- { { ac_try="$ac_link"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
- if { ac_try='./conftest$ac_cv_exeext'
- { { case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_try") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; }; then
- cross_compiling=no
- else
- if test "$cross_compiling" = maybe; then
- cross_compiling=yes
- else
- { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot run C++ compiled programs.
-If you meant to cross compile, use \`--host'.
-See \`config.log' for more details" "$LINENO" 5; }
- fi
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
-$as_echo "$cross_compiling" >&6; }
-
-rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
-ac_clean_files=$ac_clean_files_save
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
-$as_echo_n "checking for suffix of object files... " >&6; }
-if ${ac_cv_objext+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-rm -f conftest.o conftest.obj
-if { { ac_try="$ac_compile"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_compile") 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then :
- for ac_file in conftest.o conftest.obj conftest.*; do
- test -f "$ac_file" || continue;
- case $ac_file in
- *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
- *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
- break;;
- esac
-done
-else
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of object files: cannot compile
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-rm -f conftest.$ac_cv_objext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
-$as_echo "$ac_cv_objext" >&6; }
-OBJEXT=$ac_cv_objext
-ac_objext=$OBJEXT
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5
-$as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; }
-if ${ac_cv_cxx_compiler_gnu+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-#ifndef __GNUC__
- choke me
-#endif
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_compiler_gnu=yes
-else
- ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_cxx_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5
-$as_echo "$ac_cv_cxx_compiler_gnu" >&6; }
-if test $ac_compiler_gnu = yes; then
- GXX=yes
-else
- GXX=
-fi
-ac_test_CXXFLAGS=${CXXFLAGS+set}
-ac_save_CXXFLAGS=$CXXFLAGS
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5
-$as_echo_n "checking whether $CXX accepts -g... " >&6; }
-if ${ac_cv_prog_cxx_g+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_save_cxx_werror_flag=$ac_cxx_werror_flag
- ac_cxx_werror_flag=yes
- ac_cv_prog_cxx_g=no
- CXXFLAGS="-g"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_cv_prog_cxx_g=yes
-else
- CXXFLAGS=""
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
-
-else
- ac_cxx_werror_flag=$ac_save_cxx_werror_flag
- CXXFLAGS="-g"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_cv_prog_cxx_g=yes
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
- ac_cxx_werror_flag=$ac_save_cxx_werror_flag
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5
-$as_echo "$ac_cv_prog_cxx_g" >&6; }
-if test "$ac_test_CXXFLAGS" = set; then
- CXXFLAGS=$ac_save_CXXFLAGS
-elif test $ac_cv_prog_cxx_g = yes; then
- if test "$GXX" = yes; then
- CXXFLAGS="-g -O2"
- else
- CXXFLAGS="-g"
- fi
-else
- if test "$GXX" = yes; then
- CXXFLAGS="-O2"
- else
- CXXFLAGS=
- fi
-fi
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-DEPDIR="${am__leading_dot}deps"
-
-ac_config_commands="$ac_config_commands depfiles"
-
-
-am_make=${MAKE-make}
-cat > confinc << 'END'
-am__doit:
- @echo this is the am__doit target
-.PHONY: am__doit
-END
-# If we don't find an include directive, just comment out the code.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
-$as_echo_n "checking for style of include used by $am_make... " >&6; }
-am__include="#"
-am__quote=
-_am_result=none
-# First try GNU make style include.
-echo "include confinc" > confmf
-# Ignore all kinds of additional output from `make'.
-case `$am_make -s -f confmf 2> /dev/null` in #(
-*the\ am__doit\ target*)
- am__include=include
- am__quote=
- _am_result=GNU
- ;;
-esac
-# Now try BSD make style include.
-if test "$am__include" = "#"; then
- echo '.include "confinc"' > confmf
- case `$am_make -s -f confmf 2> /dev/null` in #(
- *the\ am__doit\ target*)
- am__include=.include
- am__quote="\""
- _am_result=BSD
- ;;
- esac
-fi
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
-$as_echo "$_am_result" >&6; }
-rm -f confinc confmf
-
-# Check whether --enable-dependency-tracking was given.
-if test "${enable_dependency_tracking+set}" = set; then :
- enableval=$enable_dependency_tracking;
-fi
-
-if test "x$enable_dependency_tracking" != xno; then
- am_depcomp="$ac_aux_dir/depcomp"
- AMDEPBACKSLASH='\'
-fi
- if test "x$enable_dependency_tracking" != xno; then
- AMDEP_TRUE=
- AMDEP_FALSE='#'
-else
- AMDEP_TRUE='#'
- AMDEP_FALSE=
-fi
-
-
-
-depcc="$CXX" am_compiler_list=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
-$as_echo_n "checking dependency style of $depcc... " >&6; }
-if ${am_cv_CXX_dependencies_compiler_type+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
- # We make a subdir and do the tests there. Otherwise we can end up
- # making bogus files that we don't know about and never remove. For
- # instance it was reported that on HP-UX the gcc test will end up
- # making a dummy file named `D' -- because `-MD' means `put the output
- # in D'.
- mkdir conftest.dir
- # Copy depcomp to subdir because otherwise we won't find it if we're
- # using a relative directory.
- cp "$am_depcomp" conftest.dir
- cd conftest.dir
- # We will build objects and dependencies in a subdirectory because
- # it helps to detect inapplicable dependency modes. For instance
- # both Tru64's cc and ICC support -MD to output dependencies as a
- # side effect of compilation, but ICC will put the dependencies in
- # the current directory while Tru64 will put them in the object
- # directory.
- mkdir sub
-
- am_cv_CXX_dependencies_compiler_type=none
- if test "$am_compiler_list" = ""; then
- am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
- fi
- am__universal=false
- case " $depcc " in #(
- *\ -arch\ *\ -arch\ *) am__universal=true ;;
- esac
-
- for depmode in $am_compiler_list; do
- # Setup a source with many dependencies, because some compilers
- # like to wrap large dependency lists on column 80 (with \), and
- # we should not choose a depcomp mode which is confused by this.
- #
- # We need to recreate these files for each test, as the compiler may
- # overwrite some of them when testing with obscure command lines.
- # This happens at least with the AIX C compiler.
- : > sub/conftest.c
- for i in 1 2 3 4 5 6; do
- echo '#include "conftst'$i'.h"' >> sub/conftest.c
- # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
- # Solaris 8's {/usr,}/bin/sh.
- touch sub/conftst$i.h
- done
- echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
-
- # We check with `-c' and `-o' for the sake of the "dashmstdout"
- # mode. It turns out that the SunPro C++ compiler does not properly
- # handle `-M -o', and we need to detect this. Also, some Intel
- # versions had trouble with output in subdirs
- am__obj=sub/conftest.${OBJEXT-o}
- am__minus_obj="-o $am__obj"
- case $depmode in
- gcc)
- # This depmode causes a compiler race in universal mode.
- test "$am__universal" = false || continue
- ;;
- nosideeffect)
- # after this tag, mechanisms are not by side-effect, so they'll
- # only be used when explicitly requested
- if test "x$enable_dependency_tracking" = xyes; then
- continue
- else
- break
- fi
- ;;
- msvisualcpp | msvcmsys)
- # This compiler won't grok `-c -o', but also, the minuso test has
- # not run yet. These depmodes are late enough in the game, and
- # so weak that their functioning should not be impacted.
- am__obj=conftest.${OBJEXT-o}
- am__minus_obj=
- ;;
- none) break ;;
- esac
- if depmode=$depmode \
- source=sub/conftest.c object=$am__obj \
- depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
- $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
- >/dev/null 2>conftest.err &&
- grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
- grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
- grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
- ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
- # icc doesn't choke on unknown options, it will just issue warnings
- # or remarks (even with -Werror). So we grep stderr for any message
- # that says an option was ignored or not supported.
- # When given -MP, icc 7.0 and 7.1 complain thusly:
- # icc: Command line warning: ignoring option '-M'; no argument required
- # The diagnosis changed in icc 8.0:
- # icc: Command line remark: option '-MP' not supported
- if (grep 'ignoring option' conftest.err ||
- grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
- am_cv_CXX_dependencies_compiler_type=$depmode
- break
- fi
- fi
- done
-
- cd ..
- rm -rf conftest.dir
-else
- am_cv_CXX_dependencies_compiler_type=none
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5
-$as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; }
-CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type
-
- if
- test "x$enable_dependency_tracking" != xno \
- && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then
- am__fastdepCXX_TRUE=
- am__fastdepCXX_FALSE='#'
-else
- am__fastdepCXX_TRUE='#'
- am__fastdepCXX_FALSE=
-fi
-
-
-
-case `pwd` in
- *\ * | *\ *)
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5
-$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;;
-esac
-
-
-
-macro_version='2.2.6b'
-macro_revision='1.3017'
-
-
-
-
-
-
-
-
-
-
-
-
-
-ltmain="$ac_aux_dir/ltmain.sh"
-
-# Make sure we can run config.sub.
-$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
- as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
-$as_echo_n "checking build system type... " >&6; }
-if ${ac_cv_build+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_build_alias=$build_alias
-test "x$ac_build_alias" = x &&
- ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
-test "x$ac_build_alias" = x &&
- as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
-ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
- as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
-$as_echo "$ac_cv_build" >&6; }
-case $ac_cv_build in
-*-*-*) ;;
-*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
-esac
-build=$ac_cv_build
-ac_save_IFS=$IFS; IFS='-'
-set x $ac_cv_build
-shift
-build_cpu=$1
-build_vendor=$2
-shift; shift
-# Remember, the first character of IFS is used to create $*,
-# except with old shells:
-build_os=$*
-IFS=$ac_save_IFS
-case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
-$as_echo_n "checking host system type... " >&6; }
-if ${ac_cv_host+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test "x$host_alias" = x; then
- ac_cv_host=$ac_cv_build
-else
- ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
- as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
-$as_echo "$ac_cv_host" >&6; }
-case $ac_cv_host in
-*-*-*) ;;
-*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
-esac
-host=$ac_cv_host
-ac_save_IFS=$IFS; IFS='-'
-set x $ac_cv_host
-shift
-host_cpu=$1
-host_vendor=$2
-shift; shift
-# Remember, the first character of IFS is used to create $*,
-# except with old shells:
-host_os=$*
-IFS=$ac_save_IFS
-case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
-
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
-set dummy ${ac_tool_prefix}gcc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$CC"; then
- ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_CC="${ac_tool_prefix}gcc"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_CC"; then
- ac_ct_CC=$CC
- # Extract the first word of "gcc", so it can be a program name with args.
-set dummy gcc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_CC"; then
- ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_CC="gcc"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-$as_echo "$ac_ct_CC" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_CC" = x; then
- CC=""
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- CC=$ac_ct_CC
- fi
-else
- CC="$ac_cv_prog_CC"
-fi
-
-if test -z "$CC"; then
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
-set dummy ${ac_tool_prefix}cc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$CC"; then
- ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_CC="${ac_tool_prefix}cc"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- fi
-fi
-if test -z "$CC"; then
- # Extract the first word of "cc", so it can be a program name with args.
-set dummy cc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$CC"; then
- ac_cv_prog_CC="$CC" # Let the user override the test.
-else
- ac_prog_rejected=no
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
- ac_prog_rejected=yes
- continue
- fi
- ac_cv_prog_CC="cc"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-if test $ac_prog_rejected = yes; then
- # We found a bogon in the path, so make sure we never use it.
- set dummy $ac_cv_prog_CC
- shift
- if test $# != 0; then
- # We chose a different compiler from the bogus one.
- # However, it has the same basename, so the bogon will be chosen
- # first if we set CC to just the basename; use the full file name.
- shift
- ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
- fi
-fi
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$CC"; then
- if test -n "$ac_tool_prefix"; then
- for ac_prog in cl.exe
- do
- # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$CC"; then
- ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$CC" && break
- done
-fi
-if test -z "$CC"; then
- ac_ct_CC=$CC
- for ac_prog in cl.exe
-do
- # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_CC"; then
- ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_CC="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-$as_echo "$ac_ct_CC" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$ac_ct_CC" && break
-done
-
- if test "x$ac_ct_CC" = x; then
- CC=""
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- CC=$ac_ct_CC
- fi
-fi
-
-fi
-
-
-test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "no acceptable C compiler found in \$PATH
-See \`config.log' for more details" "$LINENO" 5; }
-
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
- { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_compiler $ac_option >&5") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- sed '10a\
-... rest of stderr output deleted ...
- 10q' conftest.err >conftest.er1
- cat conftest.er1 >&5
- fi
- rm -f conftest.er1 conftest.err
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
-done
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
-$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
-if ${ac_cv_c_compiler_gnu+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-#ifndef __GNUC__
- choke me
-#endif
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
- ac_compiler_gnu=yes
-else
- ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_c_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
-$as_echo "$ac_cv_c_compiler_gnu" >&6; }
-if test $ac_compiler_gnu = yes; then
- GCC=yes
-else
- GCC=
-fi
-ac_test_CFLAGS=${CFLAGS+set}
-ac_save_CFLAGS=$CFLAGS
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
-$as_echo_n "checking whether $CC accepts -g... " >&6; }
-if ${ac_cv_prog_cc_g+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_save_c_werror_flag=$ac_c_werror_flag
- ac_c_werror_flag=yes
- ac_cv_prog_cc_g=no
- CFLAGS="-g"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
- ac_cv_prog_cc_g=yes
-else
- CFLAGS=""
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-
-else
- ac_c_werror_flag=$ac_save_c_werror_flag
- CFLAGS="-g"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
- ac_cv_prog_cc_g=yes
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
- ac_c_werror_flag=$ac_save_c_werror_flag
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
-$as_echo "$ac_cv_prog_cc_g" >&6; }
-if test "$ac_test_CFLAGS" = set; then
- CFLAGS=$ac_save_CFLAGS
-elif test $ac_cv_prog_cc_g = yes; then
- if test "$GCC" = yes; then
- CFLAGS="-g -O2"
- else
- CFLAGS="-g"
- fi
-else
- if test "$GCC" = yes; then
- CFLAGS="-O2"
- else
- CFLAGS=
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
-$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
-if ${ac_cv_prog_cc_c89+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_cv_prog_cc_c89=no
-ac_save_CC=$CC
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <stdarg.h>
-#include <stdio.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */
-struct buf { int x; };
-FILE * (*rcsopen) (struct buf *, struct stat *, int);
-static char *e (p, i)
- char **p;
- int i;
-{
- return p[i];
-}
-static char *f (char * (*g) (char **, int), char **p, ...)
-{
- char *s;
- va_list v;
- va_start (v,p);
- s = g (p, va_arg (v,int));
- va_end (v);
- return s;
-}
-
-/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has
- function prototypes and stuff, but not '\xHH' hex character constants.
- These don't provoke an error unfortunately, instead are silently treated
- as 'x'. The following induces an error, until -std is added to get
- proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an
- array size at least. It's necessary to write '\x00'==0 to get something
- that's true only with -std. */
-int osf4_cc_array ['\x00' == 0 ? 1 : -1];
-
-/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
- inside strings and character constants. */
-#define FOO(x) 'x'
-int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
-
-int test (int i, double x);
-struct s1 {int (*f) (int a);};
-struct s2 {int (*f) (double a);};
-int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
-int argc;
-char **argv;
-int
-main ()
-{
-return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1];
- ;
- return 0;
-}
-_ACEOF
-for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
- -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
-do
- CC="$ac_save_CC $ac_arg"
- if ac_fn_c_try_compile "$LINENO"; then :
- ac_cv_prog_cc_c89=$ac_arg
-fi
-rm -f core conftest.err conftest.$ac_objext
- test "x$ac_cv_prog_cc_c89" != "xno" && break
-done
-rm -f conftest.$ac_ext
-CC=$ac_save_CC
-
-fi
-# AC_CACHE_VAL
-case "x$ac_cv_prog_cc_c89" in
- x)
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
-$as_echo "none needed" >&6; } ;;
- xno)
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
-$as_echo "unsupported" >&6; } ;;
- *)
- CC="$CC $ac_cv_prog_cc_c89"
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
-$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
-esac
-if test "x$ac_cv_prog_cc_c89" != xno; then :
-
-fi
-
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-depcc="$CC" am_compiler_list=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
-$as_echo_n "checking dependency style of $depcc... " >&6; }
-if ${am_cv_CC_dependencies_compiler_type+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
- # We make a subdir and do the tests there. Otherwise we can end up
- # making bogus files that we don't know about and never remove. For
- # instance it was reported that on HP-UX the gcc test will end up
- # making a dummy file named `D' -- because `-MD' means `put the output
- # in D'.
- mkdir conftest.dir
- # Copy depcomp to subdir because otherwise we won't find it if we're
- # using a relative directory.
- cp "$am_depcomp" conftest.dir
- cd conftest.dir
- # We will build objects and dependencies in a subdirectory because
- # it helps to detect inapplicable dependency modes. For instance
- # both Tru64's cc and ICC support -MD to output dependencies as a
- # side effect of compilation, but ICC will put the dependencies in
- # the current directory while Tru64 will put them in the object
- # directory.
- mkdir sub
-
- am_cv_CC_dependencies_compiler_type=none
- if test "$am_compiler_list" = ""; then
- am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
- fi
- am__universal=false
- case " $depcc " in #(
- *\ -arch\ *\ -arch\ *) am__universal=true ;;
- esac
-
- for depmode in $am_compiler_list; do
- # Setup a source with many dependencies, because some compilers
- # like to wrap large dependency lists on column 80 (with \), and
- # we should not choose a depcomp mode which is confused by this.
- #
- # We need to recreate these files for each test, as the compiler may
- # overwrite some of them when testing with obscure command lines.
- # This happens at least with the AIX C compiler.
- : > sub/conftest.c
- for i in 1 2 3 4 5 6; do
- echo '#include "conftst'$i'.h"' >> sub/conftest.c
- # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
- # Solaris 8's {/usr,}/bin/sh.
- touch sub/conftst$i.h
- done
- echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
-
- # We check with `-c' and `-o' for the sake of the "dashmstdout"
- # mode. It turns out that the SunPro C++ compiler does not properly
- # handle `-M -o', and we need to detect this. Also, some Intel
- # versions had trouble with output in subdirs
- am__obj=sub/conftest.${OBJEXT-o}
- am__minus_obj="-o $am__obj"
- case $depmode in
- gcc)
- # This depmode causes a compiler race in universal mode.
- test "$am__universal" = false || continue
- ;;
- nosideeffect)
- # after this tag, mechanisms are not by side-effect, so they'll
- # only be used when explicitly requested
- if test "x$enable_dependency_tracking" = xyes; then
- continue
- else
- break
- fi
- ;;
- msvisualcpp | msvcmsys)
- # This compiler won't grok `-c -o', but also, the minuso test has
- # not run yet. These depmodes are late enough in the game, and
- # so weak that their functioning should not be impacted.
- am__obj=conftest.${OBJEXT-o}
- am__minus_obj=
- ;;
- none) break ;;
- esac
- if depmode=$depmode \
- source=sub/conftest.c object=$am__obj \
- depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
- $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
- >/dev/null 2>conftest.err &&
- grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
- grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
- grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
- ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
- # icc doesn't choke on unknown options, it will just issue warnings
- # or remarks (even with -Werror). So we grep stderr for any message
- # that says an option was ignored or not supported.
- # When given -MP, icc 7.0 and 7.1 complain thusly:
- # icc: Command line warning: ignoring option '-M'; no argument required
- # The diagnosis changed in icc 8.0:
- # icc: Command line remark: option '-MP' not supported
- if (grep 'ignoring option' conftest.err ||
- grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
- am_cv_CC_dependencies_compiler_type=$depmode
- break
- fi
- fi
- done
-
- cd ..
- rm -rf conftest.dir
-else
- am_cv_CC_dependencies_compiler_type=none
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
-$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
-CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
-
- if
- test "x$enable_dependency_tracking" != xno \
- && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
- am__fastdepCC_TRUE=
- am__fastdepCC_FALSE='#'
-else
- am__fastdepCC_TRUE='#'
- am__fastdepCC_FALSE=
-fi
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
-$as_echo_n "checking for a sed that does not truncate output... " >&6; }
-if ${ac_cv_path_SED+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
- for ac_i in 1 2 3 4 5 6 7; do
- ac_script="$ac_script$as_nl$ac_script"
- done
- echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
- { ac_script=; unset ac_script;}
- if test -z "$SED"; then
- ac_path_SED_found=false
- # Loop through the user's path and test for each of PROGNAME-LIST
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_prog in sed gsed; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- ac_path_SED="$as_dir/$ac_prog$ac_exec_ext"
- { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue
-# Check for GNU ac_path_SED and select it if it is found.
- # Check for GNU $ac_path_SED
-case `"$ac_path_SED" --version 2>&1` in
-*GNU*)
- ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;;
-*)
- ac_count=0
- $as_echo_n 0123456789 >"conftest.in"
- while :
- do
- cat "conftest.in" "conftest.in" >"conftest.tmp"
- mv "conftest.tmp" "conftest.in"
- cp "conftest.in" "conftest.nl"
- $as_echo '' >> "conftest.nl"
- "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break
- diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
- as_fn_arith $ac_count + 1 && ac_count=$as_val
- if test $ac_count -gt ${ac_path_SED_max-0}; then
- # Best one so far, save it but keep looking for a better one
- ac_cv_path_SED="$ac_path_SED"
- ac_path_SED_max=$ac_count
- fi
- # 10*(2^10) chars as input seems more than enough
- test $ac_count -gt 10 && break
- done
- rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
- $ac_path_SED_found && break 3
- done
- done
- done
-IFS=$as_save_IFS
- if test -z "$ac_cv_path_SED"; then
- as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5
- fi
-else
- ac_cv_path_SED=$SED
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5
-$as_echo "$ac_cv_path_SED" >&6; }
- SED="$ac_cv_path_SED"
- rm -f conftest.sed
-
-test -z "$SED" && SED=sed
-Xsed="$SED -e 1s/^X//"
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
-$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
-if ${ac_cv_path_GREP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -z "$GREP"; then
- ac_path_GREP_found=false
- # Loop through the user's path and test for each of PROGNAME-LIST
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_prog in grep ggrep; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
- { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
-# Check for GNU ac_path_GREP and select it if it is found.
- # Check for GNU $ac_path_GREP
-case `"$ac_path_GREP" --version 2>&1` in
-*GNU*)
- ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
-*)
- ac_count=0
- $as_echo_n 0123456789 >"conftest.in"
- while :
- do
- cat "conftest.in" "conftest.in" >"conftest.tmp"
- mv "conftest.tmp" "conftest.in"
- cp "conftest.in" "conftest.nl"
- $as_echo 'GREP' >> "conftest.nl"
- "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
- diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
- as_fn_arith $ac_count + 1 && ac_count=$as_val
- if test $ac_count -gt ${ac_path_GREP_max-0}; then
- # Best one so far, save it but keep looking for a better one
- ac_cv_path_GREP="$ac_path_GREP"
- ac_path_GREP_max=$ac_count
- fi
- # 10*(2^10) chars as input seems more than enough
- test $ac_count -gt 10 && break
- done
- rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
- $ac_path_GREP_found && break 3
- done
- done
- done
-IFS=$as_save_IFS
- if test -z "$ac_cv_path_GREP"; then
- as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
- fi
-else
- ac_cv_path_GREP=$GREP
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
-$as_echo "$ac_cv_path_GREP" >&6; }
- GREP="$ac_cv_path_GREP"
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
-$as_echo_n "checking for egrep... " >&6; }
-if ${ac_cv_path_EGREP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
- then ac_cv_path_EGREP="$GREP -E"
- else
- if test -z "$EGREP"; then
- ac_path_EGREP_found=false
- # Loop through the user's path and test for each of PROGNAME-LIST
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_prog in egrep; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
- { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue
-# Check for GNU ac_path_EGREP and select it if it is found.
- # Check for GNU $ac_path_EGREP
-case `"$ac_path_EGREP" --version 2>&1` in
-*GNU*)
- ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
-*)
- ac_count=0
- $as_echo_n 0123456789 >"conftest.in"
- while :
- do
- cat "conftest.in" "conftest.in" >"conftest.tmp"
- mv "conftest.tmp" "conftest.in"
- cp "conftest.in" "conftest.nl"
- $as_echo 'EGREP' >> "conftest.nl"
- "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
- diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
- as_fn_arith $ac_count + 1 && ac_count=$as_val
- if test $ac_count -gt ${ac_path_EGREP_max-0}; then
- # Best one so far, save it but keep looking for a better one
- ac_cv_path_EGREP="$ac_path_EGREP"
- ac_path_EGREP_max=$ac_count
- fi
- # 10*(2^10) chars as input seems more than enough
- test $ac_count -gt 10 && break
- done
- rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
- $ac_path_EGREP_found && break 3
- done
- done
- done
-IFS=$as_save_IFS
- if test -z "$ac_cv_path_EGREP"; then
- as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
- fi
-else
- ac_cv_path_EGREP=$EGREP
-fi
-
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
-$as_echo "$ac_cv_path_EGREP" >&6; }
- EGREP="$ac_cv_path_EGREP"
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
-$as_echo_n "checking for fgrep... " >&6; }
-if ${ac_cv_path_FGREP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
- then ac_cv_path_FGREP="$GREP -F"
- else
- if test -z "$FGREP"; then
- ac_path_FGREP_found=false
- # Loop through the user's path and test for each of PROGNAME-LIST
- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_prog in fgrep; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext"
- { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue
-# Check for GNU ac_path_FGREP and select it if it is found.
- # Check for GNU $ac_path_FGREP
-case `"$ac_path_FGREP" --version 2>&1` in
-*GNU*)
- ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;;
-*)
- ac_count=0
- $as_echo_n 0123456789 >"conftest.in"
- while :
- do
- cat "conftest.in" "conftest.in" >"conftest.tmp"
- mv "conftest.tmp" "conftest.in"
- cp "conftest.in" "conftest.nl"
- $as_echo 'FGREP' >> "conftest.nl"
- "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break
- diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
- as_fn_arith $ac_count + 1 && ac_count=$as_val
- if test $ac_count -gt ${ac_path_FGREP_max-0}; then
- # Best one so far, save it but keep looking for a better one
- ac_cv_path_FGREP="$ac_path_FGREP"
- ac_path_FGREP_max=$ac_count
- fi
- # 10*(2^10) chars as input seems more than enough
- test $ac_count -gt 10 && break
- done
- rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
- $ac_path_FGREP_found && break 3
- done
- done
- done
-IFS=$as_save_IFS
- if test -z "$ac_cv_path_FGREP"; then
- as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
- fi
-else
- ac_cv_path_FGREP=$FGREP
-fi
-
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5
-$as_echo "$ac_cv_path_FGREP" >&6; }
- FGREP="$ac_cv_path_FGREP"
-
-
-test -z "$GREP" && GREP=grep
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# Check whether --with-gnu-ld was given.
-if test "${with_gnu_ld+set}" = set; then :
- withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
-else
- with_gnu_ld=no
-fi
-
-ac_prog=ld
-if test "$GCC" = yes; then
- # Check if gcc -print-prog-name=ld gives a path.
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
-$as_echo_n "checking for ld used by $CC... " >&6; }
- case $host in
- *-*-mingw*)
- # gcc leaves a trailing carriage return which upsets mingw
- ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
- *)
- ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
- esac
- case $ac_prog in
- # Accept absolute paths.
- [\\/]* | ?:[\\/]*)
- re_direlt='/[^/][^/]*/\.\./'
- # Canonicalize the pathname of ld
- ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
- while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
- ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
- done
- test -z "$LD" && LD="$ac_prog"
- ;;
- "")
- # If it fails, then pretend we aren't using GCC.
- ac_prog=ld
- ;;
- *)
- # If it is relative, then search for the first ld in PATH.
- with_gnu_ld=unknown
- ;;
- esac
-elif test "$with_gnu_ld" = yes; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
-$as_echo_n "checking for GNU ld... " >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
-$as_echo_n "checking for non-GNU ld... " >&6; }
-fi
-if ${lt_cv_path_LD+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -z "$LD"; then
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in $PATH; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
- lt_cv_path_LD="$ac_dir/$ac_prog"
- # Check to see if the program is GNU ld. I'd rather use --version,
- # but apparently some variants of GNU ld only accept -v.
- # Break only if it was the GNU/non-GNU ld that we prefer.
- case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
- *GNU* | *'with BFD'*)
- test "$with_gnu_ld" != no && break
- ;;
- *)
- test "$with_gnu_ld" != yes && break
- ;;
- esac
- fi
- done
- IFS="$lt_save_ifs"
-else
- lt_cv_path_LD="$LD" # Let the user override the test with a path.
-fi
-fi
-
-LD="$lt_cv_path_LD"
-if test -n "$LD"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
-$as_echo "$LD" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
-$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
-if ${lt_cv_prog_gnu_ld+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- # I'd rather use --version here, but apparently some GNU lds only accept -v.
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
- lt_cv_prog_gnu_ld=yes
- ;;
-*)
- lt_cv_prog_gnu_ld=no
- ;;
-esac
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
-$as_echo "$lt_cv_prog_gnu_ld" >&6; }
-with_gnu_ld=$lt_cv_prog_gnu_ld
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5
-$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; }
-if ${lt_cv_path_NM+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$NM"; then
- # Let the user override the test.
- lt_cv_path_NM="$NM"
-else
- lt_nm_to_check="${ac_tool_prefix}nm"
- if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
- lt_nm_to_check="$lt_nm_to_check nm"
- fi
- for lt_tmp_nm in $lt_nm_to_check; do
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- tmp_nm="$ac_dir/$lt_tmp_nm"
- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
- # Check to see if the nm accepts a BSD-compat flag.
- # Adding the `sed 1q' prevents false positives on HP-UX, which says:
- # nm: unknown option "B" ignored
- # Tru64's nm complains that /dev/null is an invalid object file
- case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
- */dev/null* | *'Invalid file or object type'*)
- lt_cv_path_NM="$tmp_nm -B"
- break
- ;;
- *)
- case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
- */dev/null*)
- lt_cv_path_NM="$tmp_nm -p"
- break
- ;;
- *)
- lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
- continue # so that we can try to find one that supports BSD flags
- ;;
- esac
- ;;
- esac
- fi
- done
- IFS="$lt_save_ifs"
- done
- : ${lt_cv_path_NM=no}
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
-$as_echo "$lt_cv_path_NM" >&6; }
-if test "$lt_cv_path_NM" != "no"; then
- NM="$lt_cv_path_NM"
-else
- # Didn't find any BSD compatible name lister, look for dumpbin.
- if test -n "$ac_tool_prefix"; then
- for ac_prog in "dumpbin -symbols" "link -dump -symbols"
- do
- # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DUMPBIN+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$DUMPBIN"; then
- ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-DUMPBIN=$ac_cv_prog_DUMPBIN
-if test -n "$DUMPBIN"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5
-$as_echo "$DUMPBIN" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$DUMPBIN" && break
- done
-fi
-if test -z "$DUMPBIN"; then
- ac_ct_DUMPBIN=$DUMPBIN
- for ac_prog in "dumpbin -symbols" "link -dump -symbols"
-do
- # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_DUMPBIN"; then
- ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_DUMPBIN="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN
-if test -n "$ac_ct_DUMPBIN"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5
-$as_echo "$ac_ct_DUMPBIN" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$ac_ct_DUMPBIN" && break
-done
-
- if test "x$ac_ct_DUMPBIN" = x; then
- DUMPBIN=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- DUMPBIN=$ac_ct_DUMPBIN
- fi
-fi
-
-
- if test "$DUMPBIN" != ":"; then
- NM="$DUMPBIN"
- fi
-fi
-test -z "$NM" && NM=nm
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5
-$as_echo_n "checking the name lister ($NM) interface... " >&6; }
-if ${lt_cv_nm_interface+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_nm_interface="BSD nm"
- echo "int some_variable = 0;" > conftest.$ac_ext
- (eval echo "\"\$as_me:5078: $ac_compile\"" >&5)
- (eval "$ac_compile" 2>conftest.err)
- cat conftest.err >&5
- (eval echo "\"\$as_me:5081: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
- (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
- cat conftest.err >&5
- (eval echo "\"\$as_me:5084: output\"" >&5)
- cat conftest.out >&5
- if $GREP 'External.*some_variable' conftest.out > /dev/null; then
- lt_cv_nm_interface="MS dumpbin"
- fi
- rm -f conftest*
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5
-$as_echo "$lt_cv_nm_interface" >&6; }
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5
-$as_echo_n "checking whether ln -s works... " >&6; }
-LN_S=$as_ln_s
-if test "$LN_S" = "ln -s"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5
-$as_echo "no, using $LN_S" >&6; }
-fi
-
-# find the maximum length of command line arguments
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5
-$as_echo_n "checking the maximum length of command line arguments... " >&6; }
-if ${lt_cv_sys_max_cmd_len+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- i=0
- teststring="ABCD"
-
- case $build_os in
- msdosdjgpp*)
- # On DJGPP, this test can blow up pretty badly due to problems in libc
- # (any single argument exceeding 2000 bytes causes a buffer overrun
- # during glob expansion). Even if it were fixed, the result of this
- # check would be larger than it should be.
- lt_cv_sys_max_cmd_len=12288; # 12K is about right
- ;;
-
- gnu*)
- # Under GNU Hurd, this test is not required because there is
- # no limit to the length of command line arguments.
- # Libtool will interpret -1 as no limit whatsoever
- lt_cv_sys_max_cmd_len=-1;
- ;;
-
- cygwin* | mingw* | cegcc*)
- # On Win9x/ME, this test blows up -- it succeeds, but takes
- # about 5 minutes as the teststring grows exponentially.
- # Worse, since 9x/ME are not pre-emptively multitasking,
- # you end up with a "frozen" computer, even though with patience
- # the test eventually succeeds (with a max line length of 256k).
- # Instead, let's just punt: use the minimum linelength reported by
- # all of the supported platforms: 8192 (on NT/2K/XP).
- lt_cv_sys_max_cmd_len=8192;
- ;;
-
- amigaos*)
- # On AmigaOS with pdksh, this test takes hours, literally.
- # So we just punt and use a minimum line length of 8192.
- lt_cv_sys_max_cmd_len=8192;
- ;;
-
- netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
- # This has been around since 386BSD, at least. Likely further.
- if test -x /sbin/sysctl; then
- lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
- elif test -x /usr/sbin/sysctl; then
- lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
- else
- lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs
- fi
- # And add a safety zone
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
- ;;
-
- interix*)
- # We know the value 262144 and hardcode it with a safety zone (like BSD)
- lt_cv_sys_max_cmd_len=196608
- ;;
-
- osf*)
- # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
- # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
- # nice to cause kernel panics so lets avoid the loop below.
- # First set a reasonable default.
- lt_cv_sys_max_cmd_len=16384
- #
- if test -x /sbin/sysconfig; then
- case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
- *1*) lt_cv_sys_max_cmd_len=-1 ;;
- esac
- fi
- ;;
- sco3.2v5*)
- lt_cv_sys_max_cmd_len=102400
- ;;
- sysv5* | sco5v6* | sysv4.2uw2*)
- kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
- if test -n "$kargmax"; then
- lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'`
- else
- lt_cv_sys_max_cmd_len=32768
- fi
- ;;
- *)
- lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
- if test -n "$lt_cv_sys_max_cmd_len"; then
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
- else
- # Make teststring a little bigger before we do anything with it.
- # a 1K string should be a reasonable start.
- for i in 1 2 3 4 5 6 7 8 ; do
- teststring=$teststring$teststring
- done
- SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
- # If test is not a shell built-in, we'll probably end up computing a
- # maximum length that is only half of the actual maximum length, but
- # we can't tell.
- while { test "X"`$SHELL $0 --fallback-echo "X$teststring$teststring" 2>/dev/null` \
- = "XX$teststring$teststring"; } >/dev/null 2>&1 &&
- test $i != 17 # 1/2 MB should be enough
- do
- i=`expr $i + 1`
- teststring=$teststring$teststring
- done
- # Only check the string length outside the loop.
- lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
- teststring=
- # Add a significant safety factor because C++ compilers can tack on
- # massive amounts of additional arguments before passing them to the
- # linker. It appears as though 1/2 is a usable value.
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
- fi
- ;;
- esac
-
-fi
-
-if test -n $lt_cv_sys_max_cmd_len ; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5
-$as_echo "$lt_cv_sys_max_cmd_len" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5
-$as_echo "none" >&6; }
-fi
-max_cmd_len=$lt_cv_sys_max_cmd_len
-
-
-
-
-
-
-: ${CP="cp -f"}
-: ${MV="mv -f"}
-: ${RM="rm -f"}
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5
-$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; }
-# Try some XSI features
-xsi_shell=no
-( _lt_dummy="a/b/c"
- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
- = c,a/b,, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5
-$as_echo "$xsi_shell" >&6; }
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5
-$as_echo_n "checking whether the shell understands \"+=\"... " >&6; }
-lt_shell_append=no
-( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \
- >/dev/null 2>&1 \
- && lt_shell_append=yes
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5
-$as_echo "$lt_shell_append" >&6; }
-
-
-if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
- lt_unset=unset
-else
- lt_unset=false
-fi
-
-
-
-
-
-# test EBCDIC or ASCII
-case `echo X|tr X '\101'` in
- A) # ASCII based system
- # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
- lt_SP2NL='tr \040 \012'
- lt_NL2SP='tr \015\012 \040\040'
- ;;
- *) # EBCDIC based system
- lt_SP2NL='tr \100 \n'
- lt_NL2SP='tr \r\n \100\100'
- ;;
-esac
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
-$as_echo_n "checking for $LD option to reload object files... " >&6; }
-if ${lt_cv_ld_reload_flag+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_ld_reload_flag='-r'
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5
-$as_echo "$lt_cv_ld_reload_flag" >&6; }
-reload_flag=$lt_cv_ld_reload_flag
-case $reload_flag in
-"" | " "*) ;;
-*) reload_flag=" $reload_flag" ;;
-esac
-reload_cmds='$LD$reload_flag -o $output$reload_objs'
-case $host_os in
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
- else
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- fi
- ;;
-esac
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
-set dummy ${ac_tool_prefix}objdump; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OBJDUMP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$OBJDUMP"; then
- ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-OBJDUMP=$ac_cv_prog_OBJDUMP
-if test -n "$OBJDUMP"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
-$as_echo "$OBJDUMP" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OBJDUMP"; then
- ac_ct_OBJDUMP=$OBJDUMP
- # Extract the first word of "objdump", so it can be a program name with args.
-set dummy objdump; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_OBJDUMP"; then
- ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_OBJDUMP="objdump"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
-if test -n "$ac_ct_OBJDUMP"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
-$as_echo "$ac_ct_OBJDUMP" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_OBJDUMP" = x; then
- OBJDUMP="false"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- OBJDUMP=$ac_ct_OBJDUMP
- fi
-else
- OBJDUMP="$ac_cv_prog_OBJDUMP"
-fi
-
-test -z "$OBJDUMP" && OBJDUMP=objdump
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5
-$as_echo_n "checking how to recognize dependent libraries... " >&6; }
-if ${lt_cv_deplibs_check_method+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_file_magic_cmd='$MAGIC_CMD'
-lt_cv_file_magic_test_file=
-lt_cv_deplibs_check_method='unknown'
-# Need to set the preceding variable on all platforms that support
-# interlibrary dependencies.
-# 'none' -- dependencies not supported.
-# `unknown' -- same as none, but documents that we really don't know.
-# 'pass_all' -- all dependencies passed with no checks.
-# 'test_compile' -- check by making test program.
-# 'file_magic [[regex]]' -- check by looking for files in library path
-# which responds to the $file_magic_cmd with a given extended regex.
-# If you have `file' or equivalent on your system and you're not sure
-# whether `pass_all' will *always* work, you probably want this one.
-
-case $host_os in
-aix[4-9]*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-beos*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-bsdi[45]*)
- lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
- lt_cv_file_magic_cmd='/usr/bin/file -L'
- lt_cv_file_magic_test_file=/shlib/libc.so
- ;;
-
-cygwin*)
- # func_win32_libid is a shell function defined in ltmain.sh
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- ;;
-
-mingw* | pw32*)
- # Base MSYS/MinGW do not provide the 'file' command needed by
- # func_win32_libid shell function, so use a weaker test based on 'objdump',
- # unless we find 'file', for example because we are cross-compiling.
- if ( file / ) >/dev/null 2>&1; then
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-
-cegcc)
- # use the weaker test based on 'objdump'. See mingw*.
- lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- ;;
-
-darwin* | rhapsody*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-freebsd* | dragonfly*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
- case $host_cpu in
- i*86 )
- # Not sure whether the presence of OpenBSD here was a mistake.
- # Let's accept both of them until this is cleared up.
- lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library'
- lt_cv_file_magic_cmd=/usr/bin/file
- lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
- ;;
- esac
- else
- lt_cv_deplibs_check_method=pass_all
- fi
- ;;
-
-gnu*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-hpux10.20* | hpux11*)
- lt_cv_file_magic_cmd=/usr/bin/file
- case $host_cpu in
- ia64*)
- lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64'
- lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
- ;;
- hppa*64*)
- lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]'
- lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
- ;;
- *)
- lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9].[0-9]) shared library'
- lt_cv_file_magic_test_file=/usr/lib/libc.sl
- ;;
- esac
- ;;
-
-interix[3-9]*)
- # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
- lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'
- ;;
-
-irix5* | irix6* | nonstopux*)
- case $LD in
- *-32|*"-32 ") libmagic=32-bit;;
- *-n32|*"-n32 ") libmagic=N32;;
- *-64|*"-64 ") libmagic=64-bit;;
- *) libmagic=never-match;;
- esac
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-# This must be Linux ELF.
-linux* | k*bsd*-gnu)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
- lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
- else
- lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$'
- fi
- ;;
-
-newos6*)
- lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)'
- lt_cv_file_magic_cmd=/usr/bin/file
- lt_cv_file_magic_test_file=/usr/lib/libnls.so
- ;;
-
-*nto* | *qnx*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-openbsd*)
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$'
- else
- lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
- fi
- ;;
-
-osf3* | osf4* | osf5*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-rdos*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-solaris*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-sysv4 | sysv4.3*)
- case $host_vendor in
- motorola)
- lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]'
- lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
- ;;
- ncr)
- lt_cv_deplibs_check_method=pass_all
- ;;
- sequent)
- lt_cv_file_magic_cmd='/bin/file'
- lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
- ;;
- sni)
- lt_cv_file_magic_cmd='/bin/file'
- lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib"
- lt_cv_file_magic_test_file=/lib/libc.so
- ;;
- siemens)
- lt_cv_deplibs_check_method=pass_all
- ;;
- pc)
- lt_cv_deplibs_check_method=pass_all
- ;;
- esac
- ;;
-
-tpf*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-esac
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
-$as_echo "$lt_cv_deplibs_check_method" >&6; }
-file_magic_cmd=$lt_cv_file_magic_cmd
-deplibs_check_method=$lt_cv_deplibs_check_method
-test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
-set dummy ${ac_tool_prefix}ar; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AR+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$AR"; then
- ac_cv_prog_AR="$AR" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_AR="${ac_tool_prefix}ar"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-AR=$ac_cv_prog_AR
-if test -n "$AR"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
-$as_echo "$AR" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_AR"; then
- ac_ct_AR=$AR
- # Extract the first word of "ar", so it can be a program name with args.
-set dummy ar; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_AR+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_AR"; then
- ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_AR="ar"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_AR=$ac_cv_prog_ac_ct_AR
-if test -n "$ac_ct_AR"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
-$as_echo "$ac_ct_AR" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- AR=$ac_ct_AR
- fi
-else
- AR="$ac_cv_prog_AR"
-fi
-
-test -z "$AR" && AR=ar
-test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-set dummy ${ac_tool_prefix}strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$STRIP"; then
- ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_STRIP="${ac_tool_prefix}strip"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-STRIP=$ac_cv_prog_STRIP
-if test -n "$STRIP"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
-$as_echo "$STRIP" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_STRIP"; then
- ac_ct_STRIP=$STRIP
- # Extract the first word of "strip", so it can be a program name with args.
-set dummy strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_STRIP"; then
- ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_STRIP="strip"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
-if test -n "$ac_ct_STRIP"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
-$as_echo "$ac_ct_STRIP" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_STRIP" = x; then
- STRIP=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- STRIP=$ac_ct_STRIP
- fi
-else
- STRIP="$ac_cv_prog_STRIP"
-fi
-
-test -z "$STRIP" && STRIP=:
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
-set dummy ${ac_tool_prefix}ranlib; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_RANLIB+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$RANLIB"; then
- ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-RANLIB=$ac_cv_prog_RANLIB
-if test -n "$RANLIB"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
-$as_echo "$RANLIB" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_RANLIB"; then
- ac_ct_RANLIB=$RANLIB
- # Extract the first word of "ranlib", so it can be a program name with args.
-set dummy ranlib; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_RANLIB"; then
- ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_RANLIB="ranlib"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
-if test -n "$ac_ct_RANLIB"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
-$as_echo "$ac_ct_RANLIB" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_RANLIB" = x; then
- RANLIB=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- RANLIB=$ac_ct_RANLIB
- fi
-else
- RANLIB="$ac_cv_prog_RANLIB"
-fi
-
-test -z "$RANLIB" && RANLIB=:
-
-
-
-
-
-
-# Determine commands to create old-style static archives.
-old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
-old_postinstall_cmds='chmod 644 $oldlib'
-old_postuninstall_cmds=
-
-if test -n "$RANLIB"; then
- case $host_os in
- openbsd*)
- old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib"
- ;;
- *)
- old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib"
- ;;
- esac
- old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib"
-fi
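Expanded for a hypothetical static library, the old_archive_cmds and post-install commands set above amount to the following (library and object names are made up; the commands are echoed rather than executed so the sketch runs anywhere):

    AR=ar; AR_FLAGS=cru; RANLIB=ranlib
    oldlib=libexample.a
    oldobjs=' foo.o bar.o'
    echo "$AR $AR_FLAGS $oldlib$oldobjs"   # -> ar cru libexample.a foo.o bar.o
    echo "chmod 644 $oldlib"               # old_postinstall_cmds
    echo "$RANLIB $oldlib"                 # appended because RANLIB was found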
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-
-# Check for command to grab the raw symbol name followed by C symbol from nm.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5
-$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; }
-if ${lt_cv_sys_global_symbol_pipe+:} false; then :
- $as_echo_n "(cached) " >&6
-else
-
-# These are sane defaults that work on at least a few old systems.
-# [They come from Ultrix. What could be older than Ultrix?!! ;)]
-
-# Character class describing NM global symbol codes.
-symcode='[BCDEGRST]'
-
-# Regexp to match symbols that can be accessed directly from C.
-sympat='\([_A-Za-z][_A-Za-z0-9]*\)'
-
-# Define system-specific variables.
-case $host_os in
-aix*)
- symcode='[BCDT]'
- ;;
-cygwin* | mingw* | pw32* | cegcc*)
- symcode='[ABCDGISTW]'
- ;;
-hpux*)
- if test "$host_cpu" = ia64; then
- symcode='[ABCDEGRST]'
- fi
- ;;
-irix* | nonstopux*)
- symcode='[BCDEGRST]'
- ;;
-osf*)
- symcode='[BCDEGQRST]'
- ;;
-solaris*)
- symcode='[BDRT]'
- ;;
-sco3.2v5*)
- symcode='[DT]'
- ;;
-sysv4.2uw2*)
- symcode='[DT]'
- ;;
-sysv5* | sco5v6* | unixware* | OpenUNIX*)
- symcode='[ABDT]'
- ;;
-sysv4)
- symcode='[DFNSTU]'
- ;;
-esac
-
-# If we're using GNU nm, then use its standard symbol codes.
-case `$NM -V 2>&1` in
-*GNU* | *'with BFD'*)
- symcode='[ABCDGIRSTW]' ;;
-esac
-
-# Transform an extracted symbol line into a proper C declaration.
-# Some systems (esp. on ia64) link data and code symbols differently,
-# so use this general approach.
-lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
-# Transform an extracted symbol line into symbol name and symbol address
-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
-# Handle CRLF in mingw tool chain
-opt_cr=
-case $build_os in
-mingw*)
- opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
- ;;
-esac
-
-# Try without a prefix underscore, then with it.
-for ac_symprfx in "" "_"; do
-
- # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
- symxfrm="\\1 $ac_symprfx\\2 \\2"
-
- # Write the raw and C identifiers.
- if test "$lt_cv_nm_interface" = "MS dumpbin"; then
- # Fake it for dumpbin and say T for any non-static function
- # and D for any global variable.
- # Also find C++ and __fastcall symbols from MSVC++,
- # which start with @ or ?.
- lt_cv_sys_global_symbol_pipe="$AWK '"\
-" {last_section=section; section=\$ 3};"\
-" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
-" \$ 0!~/External *\|/{next};"\
-" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
-" {if(hide[section]) next};"\
-" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
-" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
-" s[1]~/^[@?]/{print s[1], s[1]; next};"\
-" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
-" ' prfx=^$ac_symprfx"
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-
- rm -f conftest*
- cat > conftest.$ac_ext <<_LT_EOF
-#ifdef __cplusplus
-extern "C" {
-#endif
-char nm_test_var;
-void nm_test_func(void);
-void nm_test_func(void){}
-#ifdef __cplusplus
-}
-#endif
-int main(){nm_test_var='a';nm_test_func();return(0);}
-_LT_EOF
-
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then
- # Now try to grab the symbols.
- nlist=conftest.nm
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist\""; } >&5
- (eval $NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && test -s "$nlist"; then
- # Try sorting and uniquifying the output.
- if sort "$nlist" | uniq > "$nlist"T; then
- mv -f "$nlist"T "$nlist"
- else
- rm -f "$nlist"T
- fi
-
- # Make sure that we snagged all the symbols we need.
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-_LT_EOF
- # Now generate the symbol file.
- eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
-
- cat <<_LT_EOF >> conftest.$ac_ext
-
-/* The mapping between symbol names and symbols. */
-const struct {
- const char *name;
- void *address;
-}
-lt__PROGRAM__LTX_preloaded_symbols[] =
-{
- { "@PROGRAM@", (void *) 0 },
-_LT_EOF
- $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
- cat <<\_LT_EOF >> conftest.$ac_ext
- {0, (void *) 0}
-};
-
-/* This works around a problem in the FreeBSD linker */
-#ifdef FREEBSD_WORKAROUND
-static const void *lt_preloaded_setup() {
- return lt__PROGRAM__LTX_preloaded_symbols;
-}
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-_LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
- lt_save_LIBS="$LIBS"
- lt_save_CFLAGS="$CFLAGS"
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
- (eval $ac_link) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
- LIBS="$lt_save_LIBS"
- CFLAGS="$lt_save_CFLAGS"
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
- else
- echo "cannot find nm_test_var in $nlist" >&5
- fi
- else
- echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5
- fi
- else
- echo "$progname: failed program was:" >&5
- cat conftest.$ac_ext >&5
- fi
- rm -rf conftest* conftst*
-
- # Do not use the global_symbol_pipe unless it works.
- if test "$pipe_works" = yes; then
- break
- else
- lt_cv_sys_global_symbol_pipe=
- fi
-done
-
-fi
-
-if test -z "$lt_cv_sys_global_symbol_pipe"; then
- lt_cv_sys_global_symbol_to_cdecl=
-fi
-if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
-$as_echo "failed" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
-$as_echo "ok" >&6; }
-fi
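The pipe probed above reduces raw nm output to "<type> <raw symbol> <C symbol>" lines, which later feed the preloaded-symbols table. A rough sketch of the successful case on a GNU/ELF toolchain (symcode expanded to the GNU set, no underscore prefix; the exact type letters printed depend on the compiler's -fcommon default):

    printf 'char nm_test_var;\nvoid nm_test_func(void){}\n' > conftest.c
    cc -c conftest.c
    # Typically prints lines such as:
    #   B nm_test_var nm_test_var
    #   T nm_test_func nm_test_func
    nm conftest.o | sed -n -e 's/^.*[ ]\([ABCDGIRSTW][ABCDGIRSTW]*\)[ ][ ]*\([_A-Za-z][_A-Za-z0-9]*\)$/\1 \2 \2/p'
    rm -f conftest.c conftest.o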
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# Check whether --enable-libtool-lock was given.
-if test "${enable_libtool_lock+set}" = set; then :
- enableval=$enable_libtool_lock;
-fi
-
-test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
-
-# Some flags need to be propagated to the compiler or linker for good
-# libtool support.
-case $host in
-ia64-*-hpux*)
- # Find out which ABI we are using.
- echo 'int i;' > conftest.$ac_ext
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then
- case `/usr/bin/file conftest.$ac_objext` in
- *ELF-32*)
- HPUX_IA64_MODE="32"
- ;;
- *ELF-64*)
- HPUX_IA64_MODE="64"
- ;;
- esac
- fi
- rm -rf conftest*
- ;;
-*-*-irix6*)
- # Find out which ABI we are using.
- echo '#line 6290 "configure"' > conftest.$ac_ext
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then
- if test "$lt_cv_prog_gnu_ld" = yes; then
- case `/usr/bin/file conftest.$ac_objext` in
- *32-bit*)
- LD="${LD-ld} -melf32bsmip"
- ;;
- *N32*)
- LD="${LD-ld} -melf32bmipn32"
- ;;
- *64-bit*)
- LD="${LD-ld} -melf64bmip"
- ;;
- esac
- else
- case `/usr/bin/file conftest.$ac_objext` in
- *32-bit*)
- LD="${LD-ld} -32"
- ;;
- *N32*)
- LD="${LD-ld} -n32"
- ;;
- *64-bit*)
- LD="${LD-ld} -64"
- ;;
- esac
- fi
- fi
- rm -rf conftest*
- ;;
-
-x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
-s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
- # Find out which ABI we are using.
- echo 'int i;' > conftest.$ac_ext
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then
- case `/usr/bin/file conftest.o` in
- *32-bit*)
- case $host in
- x86_64-*kfreebsd*-gnu)
- LD="${LD-ld} -m elf_i386_fbsd"
- ;;
- x86_64-*linux*)
- LD="${LD-ld} -m elf_i386"
- ;;
- ppc64-*linux*|powerpc64-*linux*)
- LD="${LD-ld} -m elf32ppclinux"
- ;;
- s390x-*linux*)
- LD="${LD-ld} -m elf_s390"
- ;;
- sparc64-*linux*)
- LD="${LD-ld} -m elf32_sparc"
- ;;
- esac
- ;;
- *64-bit*)
- case $host in
- x86_64-*kfreebsd*-gnu)
- LD="${LD-ld} -m elf_x86_64_fbsd"
- ;;
- x86_64-*linux*)
- LD="${LD-ld} -m elf_x86_64"
- ;;
- ppc*-*linux*|powerpc*-*linux*)
- LD="${LD-ld} -m elf64ppc"
- ;;
- s390*-*linux*|s390*-*tpf*)
- LD="${LD-ld} -m elf64_s390"
- ;;
- sparc*-*linux*)
- LD="${LD-ld} -m elf64_sparc"
- ;;
- esac
- ;;
- esac
- fi
- rm -rf conftest*
- ;;
-
-*-*-sco3.2v5*)
- # On SCO OpenServer 5, we need -belf to get full-featured binaries.
- SAVE_CFLAGS="$CFLAGS"
- CFLAGS="$CFLAGS -belf"
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5
-$as_echo_n "checking whether the C compiler needs -belf... " >&6; }
-if ${lt_cv_cc_needs_belf+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- lt_cv_cc_needs_belf=yes
-else
- lt_cv_cc_needs_belf=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
- ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
-$as_echo "$lt_cv_cc_needs_belf" >&6; }
- if test x"$lt_cv_cc_needs_belf" != x"yes"; then
- # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
- CFLAGS="$SAVE_CFLAGS"
- fi
- ;;
-sparc*-*solaris*)
- # Find out which ABI we are using.
- echo 'int i;' > conftest.$ac_ext
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then
- case `/usr/bin/file conftest.o` in
- *64-bit*)
- case $lt_cv_prog_gnu_ld in
- yes*) LD="${LD-ld} -m elf64_sparc" ;;
- *)
- if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
- LD="${LD-ld} -64"
- fi
- ;;
- esac
- ;;
- esac
- fi
- rm -rf conftest*
- ;;
-esac
-
-need_locks="$enable_libtool_lock"
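Every multilib host handled above uses the same recipe: compile a trivial object, ask file(1) for its word size, and append a matching emulation flag to LD. Reduced to the x86_64 GNU/Linux branch only (the other hosts just swap the -m values):

    echo 'int i;' > conftest.c
    cc -c conftest.c
    case `/usr/bin/file conftest.o` in
      *32-bit*) LD="${LD-ld} -m elf_i386"   ;;
      *64-bit*) LD="${LD-ld} -m elf_x86_64" ;;
    esac
    echo "selected LD: $LD"
    rm -f conftest.c conftest.o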
-
-
- case $host_os in
- rhapsody* | darwin*)
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args.
-set dummy ${ac_tool_prefix}dsymutil; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DSYMUTIL+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$DSYMUTIL"; then
- ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-DSYMUTIL=$ac_cv_prog_DSYMUTIL
-if test -n "$DSYMUTIL"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5
-$as_echo "$DSYMUTIL" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_DSYMUTIL"; then
- ac_ct_DSYMUTIL=$DSYMUTIL
- # Extract the first word of "dsymutil", so it can be a program name with args.
-set dummy dsymutil; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_DSYMUTIL"; then
- ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_DSYMUTIL="dsymutil"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL
-if test -n "$ac_ct_DSYMUTIL"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5
-$as_echo "$ac_ct_DSYMUTIL" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_DSYMUTIL" = x; then
- DSYMUTIL=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- DSYMUTIL=$ac_ct_DSYMUTIL
- fi
-else
- DSYMUTIL="$ac_cv_prog_DSYMUTIL"
-fi
-
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args.
-set dummy ${ac_tool_prefix}nmedit; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_NMEDIT+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$NMEDIT"; then
- ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-NMEDIT=$ac_cv_prog_NMEDIT
-if test -n "$NMEDIT"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5
-$as_echo "$NMEDIT" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_NMEDIT"; then
- ac_ct_NMEDIT=$NMEDIT
- # Extract the first word of "nmedit", so it can be a program name with args.
-set dummy nmedit; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_NMEDIT"; then
- ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_NMEDIT="nmedit"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT
-if test -n "$ac_ct_NMEDIT"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5
-$as_echo "$ac_ct_NMEDIT" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_NMEDIT" = x; then
- NMEDIT=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- NMEDIT=$ac_ct_NMEDIT
- fi
-else
- NMEDIT="$ac_cv_prog_NMEDIT"
-fi
-
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args.
-set dummy ${ac_tool_prefix}lipo; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_LIPO+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$LIPO"; then
- ac_cv_prog_LIPO="$LIPO" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_LIPO="${ac_tool_prefix}lipo"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-LIPO=$ac_cv_prog_LIPO
-if test -n "$LIPO"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5
-$as_echo "$LIPO" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_LIPO"; then
- ac_ct_LIPO=$LIPO
- # Extract the first word of "lipo", so it can be a program name with args.
-set dummy lipo; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_LIPO+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_LIPO"; then
- ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_LIPO="lipo"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO
-if test -n "$ac_ct_LIPO"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5
-$as_echo "$ac_ct_LIPO" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_LIPO" = x; then
- LIPO=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- LIPO=$ac_ct_LIPO
- fi
-else
- LIPO="$ac_cv_prog_LIPO"
-fi
-
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args.
-set dummy ${ac_tool_prefix}otool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OTOOL+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$OTOOL"; then
- ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_OTOOL="${ac_tool_prefix}otool"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-OTOOL=$ac_cv_prog_OTOOL
-if test -n "$OTOOL"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5
-$as_echo "$OTOOL" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OTOOL"; then
- ac_ct_OTOOL=$OTOOL
- # Extract the first word of "otool", so it can be a program name with args.
-set dummy otool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OTOOL+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_OTOOL"; then
- ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_OTOOL="otool"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL
-if test -n "$ac_ct_OTOOL"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5
-$as_echo "$ac_ct_OTOOL" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_OTOOL" = x; then
- OTOOL=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- OTOOL=$ac_ct_OTOOL
- fi
-else
- OTOOL="$ac_cv_prog_OTOOL"
-fi
-
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args.
-set dummy ${ac_tool_prefix}otool64; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OTOOL64+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$OTOOL64"; then
- ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-OTOOL64=$ac_cv_prog_OTOOL64
-if test -n "$OTOOL64"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5
-$as_echo "$OTOOL64" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OTOOL64"; then
- ac_ct_OTOOL64=$OTOOL64
- # Extract the first word of "otool64", so it can be a program name with args.
-set dummy otool64; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_OTOOL64"; then
- ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_OTOOL64="otool64"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64
-if test -n "$ac_ct_OTOOL64"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5
-$as_echo "$ac_ct_OTOOL64" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
- if test "x$ac_ct_OTOOL64" = x; then
- OTOOL64=":"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- OTOOL64=$ac_ct_OTOOL64
- fi
-else
- OTOOL64="$ac_cv_prog_OTOOL64"
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5
-$as_echo_n "checking for -single_module linker flag... " >&6; }
-if ${lt_cv_apple_cc_single_mod+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_apple_cc_single_mod=no
- if test -z "${LT_MULTI_MODULE}"; then
- # By default we will add the -single_module flag. You can override
- # by either setting the environment variable LT_MULTI_MODULE
- # non-empty at configure time, or by adding -multi_module to the
- # link flags.
- rm -rf libconftest.dylib*
- echo "int foo(void){return 1;}" > conftest.c
- echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
--dynamiclib -Wl,-single_module conftest.c" >&5
- $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
- -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
- _lt_result=$?
- if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then
- lt_cv_apple_cc_single_mod=yes
- else
- cat conftest.err >&5
- fi
- rm -rf libconftest.dylib*
- rm -f conftest.*
- fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5
-$as_echo "$lt_cv_apple_cc_single_mod" >&6; }
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5
-$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; }
-if ${lt_cv_ld_exported_symbols_list+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_ld_exported_symbols_list=no
- save_LDFLAGS=$LDFLAGS
- echo "_main" > conftest.sym
- LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- lt_cv_ld_exported_symbols_list=yes
-else
- lt_cv_ld_exported_symbols_list=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
- LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5
-$as_echo "$lt_cv_ld_exported_symbols_list" >&6; }
- case $host_os in
- rhapsody* | darwin1.[012])
- _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
- darwin1.*)
- _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
- darwin*) # darwin 5.x on
- # if running on 10.5 or later, the deployment target defaults
- # to the OS version, if on x86, and 10.4, the deployment
- # target defaults to 10.4. Don't you love it?
- case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
- 10.0,*86*-darwin8*|10.0,*-darwin[91]*)
- _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
- 10.[012]*)
- _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
- 10.*)
- _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
- esac
- ;;
- esac
- if test "$lt_cv_apple_cc_single_mod" = "yes"; then
- _lt_dar_single_mod='$single_module'
- fi
- if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
- _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
- else
- _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
- fi
- if test "$DSYMUTIL" != ":"; then
- _lt_dsymutil='~$DSYMUTIL $lib || :'
- else
- _lt_dsymutil=
- fi
- ;;
- esac
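Put together, on a 10.5-or-later Darwin host where both probes above succeed, the flags chosen here end up on shared-library link lines roughly like the one below (library name, paths and the exported symbol are invented; the real command libtool builds also carries install_name, versioning and compiler flags):

    mkdir -p .libs
    echo '_example_fn' > .libs/libexample-symbols.expsym        # exported-symbols list
    echo 'int example_fn(void){return 1;}' > example.c
    cc -c example.c
    cc -dynamiclib -Wl,-undefined -Wl,dynamic_lookup -Wl,-single_module \
       -Wl,-exported_symbols_list,.libs/libexample-symbols.expsym \
       -o .libs/libexample.1.dylib example.o
    dsymutil .libs/libexample.1.dylib || :                      # only when DSYMUTIL was found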
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
-$as_echo_n "checking how to run the C preprocessor... " >&6; }
-# On Suns, sometimes $CPP names a directory.
-if test -n "$CPP" && test -d "$CPP"; then
- CPP=
-fi
-if test -z "$CPP"; then
- if ${ac_cv_prog_CPP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- # Double quotes because CPP needs to be expanded
- for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
- do
- ac_preproc_ok=false
-for ac_c_preproc_warn_flag in '' yes
-do
- # Use a header file that comes with gcc, so configuring glibc
- # with a fresh cross-compiler works.
- # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
- # <limits.h> exists even on freestanding compilers.
- # On the NeXT, cc -E runs the code through the compiler's parser,
- # not just through cpp. "Syntax error" is here to catch this case.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
- Syntax error
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-
-else
- # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
- # OK, works on sane cases. Now check whether nonexistent headers
- # can be detected and how.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
- # Broken: success on invalid input.
-continue
-else
- # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
- break
-fi
-
- done
- ac_cv_prog_CPP=$CPP
-
-fi
- CPP=$ac_cv_prog_CPP
-else
- ac_cv_prog_CPP=$CPP
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
-$as_echo "$CPP" >&6; }
-ac_preproc_ok=false
-for ac_c_preproc_warn_flag in '' yes
-do
- # Use a header file that comes with gcc, so configuring glibc
- # with a fresh cross-compiler works.
- # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
- # <limits.h> exists even on freestanding compilers.
- # On the NeXT, cc -E runs the code through the compiler's parser,
- # not just through cpp. "Syntax error" is here to catch this case.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
- Syntax error
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-
-else
- # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
- # OK, works on sane cases. Now check whether nonexistent headers
- # can be detected and how.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
- # Broken: success on invalid input.
-continue
-else
- # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
-
-else
- { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
-$as_echo_n "checking for ANSI C header files... " >&6; }
-if ${ac_cv_header_stdc+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <stdlib.h>
-#include <stdarg.h>
-#include <string.h>
-#include <float.h>
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
- ac_cv_header_stdc=yes
-else
- ac_cv_header_stdc=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-if test $ac_cv_header_stdc = yes; then
- # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <string.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
- $EGREP "memchr" >/dev/null 2>&1; then :
-
-else
- ac_cv_header_stdc=no
-fi
-rm -f conftest*
-
-fi
-
-if test $ac_cv_header_stdc = yes; then
- # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <stdlib.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
- $EGREP "free" >/dev/null 2>&1; then :
-
-else
- ac_cv_header_stdc=no
-fi
-rm -f conftest*
-
-fi
-
-if test $ac_cv_header_stdc = yes; then
- # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
- if test "$cross_compiling" = yes; then :
- :
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <ctype.h>
-#include <stdlib.h>
-#if ((' ' & 0x0FF) == 0x020)
-# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
-# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
-#else
-# define ISLOWER(c) \
- (('a' <= (c) && (c) <= 'i') \
- || ('j' <= (c) && (c) <= 'r') \
- || ('s' <= (c) && (c) <= 'z'))
-# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
-#endif
-
-#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
-int
-main ()
-{
- int i;
- for (i = 0; i < 256; i++)
- if (XOR (islower (i), ISLOWER (i))
- || toupper (i) != TOUPPER (i))
- return 2;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_run "$LINENO"; then :
-
-else
- ac_cv_header_stdc=no
-fi
-rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
- conftest.$ac_objext conftest.beam conftest.$ac_ext
-fi
-
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
-$as_echo "$ac_cv_header_stdc" >&6; }
-if test $ac_cv_header_stdc = yes; then
-
-$as_echo "#define STDC_HEADERS 1" >>confdefs.h
-
-fi
-
-# On IRIX 5.3, sys/types and inttypes.h are conflicting.
-for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
- inttypes.h stdint.h unistd.h
-do :
- as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
-ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
-"
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
- cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-
-done
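Each header the loop above finds becomes a HAVE_ macro in confdefs.h, with the header path uppercased and punctuation mapped to underscores by $as_tr_cpp. An approximation of that transform with plain sed (the y-table below is an assumption standing in for autoconf's generated one):

    for hdr in sys/types.h stdint.h unistd.h; do
      macro=HAVE_`echo "$hdr" | sed 'y%abcdefghijklmnopqrstuvwxyz./-%ABCDEFGHIJKLMNOPQRSTUVWXYZ___%'`
      echo "#define $macro 1"    # -> HAVE_SYS_TYPES_H, HAVE_STDINT_H, HAVE_UNISTD_H
    done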
-
-
-for ac_header in dlfcn.h
-do :
- ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default
-"
-if test "x$ac_cv_header_dlfcn_h" = xyes; then :
- cat >>confdefs.h <<_ACEOF
-#define HAVE_DLFCN_H 1
-_ACEOF
-
-fi
-
-done
-
-
-
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-if test -z "$CXX"; then
- if test -n "$CCC"; then
- CXX=$CCC
- else
- if test -n "$ac_tool_prefix"; then
- for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
- do
- # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$CXX"; then
- ac_cv_prog_CXX="$CXX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_CXX="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-CXX=$ac_cv_prog_CXX
-if test -n "$CXX"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5
-$as_echo "$CXX" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$CXX" && break
- done
-fi
-if test -z "$CXX"; then
- ac_ct_CXX=$CXX
- for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
-do
- # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -n "$ac_ct_CXX"; then
- ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
- ac_cv_prog_ac_ct_CXX="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CXX=$ac_cv_prog_ac_ct_CXX
-if test -n "$ac_ct_CXX"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5
-$as_echo "$ac_ct_CXX" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- test -n "$ac_ct_CXX" && break
-done
-
- if test "x$ac_ct_CXX" = x; then
- CXX="g++"
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- CXX=$ac_ct_CXX
- fi
-fi
-
- fi
-fi
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
- { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_compiler $ac_option >&5") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- sed '10a\
-... rest of stderr output deleted ...
- 10q' conftest.err >conftest.er1
- cat conftest.er1 >&5
- fi
- rm -f conftest.er1 conftest.err
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
-done
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5
-$as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; }
-if ${ac_cv_cxx_compiler_gnu+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-#ifndef __GNUC__
- choke me
-#endif
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_compiler_gnu=yes
-else
- ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_cxx_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5
-$as_echo "$ac_cv_cxx_compiler_gnu" >&6; }
-if test $ac_compiler_gnu = yes; then
- GXX=yes
-else
- GXX=
-fi
-ac_test_CXXFLAGS=${CXXFLAGS+set}
-ac_save_CXXFLAGS=$CXXFLAGS
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5
-$as_echo_n "checking whether $CXX accepts -g... " >&6; }
-if ${ac_cv_prog_cxx_g+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_save_cxx_werror_flag=$ac_cxx_werror_flag
- ac_cxx_werror_flag=yes
- ac_cv_prog_cxx_g=no
- CXXFLAGS="-g"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_cv_prog_cxx_g=yes
-else
- CXXFLAGS=""
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
-
-else
- ac_cxx_werror_flag=$ac_save_cxx_werror_flag
- CXXFLAGS="-g"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_cv_prog_cxx_g=yes
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
- ac_cxx_werror_flag=$ac_save_cxx_werror_flag
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5
-$as_echo "$ac_cv_prog_cxx_g" >&6; }
-if test "$ac_test_CXXFLAGS" = set; then
- CXXFLAGS=$ac_save_CXXFLAGS
-elif test $ac_cv_prog_cxx_g = yes; then
- if test "$GXX" = yes; then
- CXXFLAGS="-g -O2"
- else
- CXXFLAGS="-g"
- fi
-else
- if test "$GXX" = yes; then
- CXXFLAGS="-O2"
- else
- CXXFLAGS=
- fi
-fi
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-depcc="$CXX" am_compiler_list=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
-$as_echo_n "checking dependency style of $depcc... " >&6; }
-if ${am_cv_CXX_dependencies_compiler_type+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
- # We make a subdir and do the tests there. Otherwise we can end up
- # making bogus files that we don't know about and never remove. For
- # instance it was reported that on HP-UX the gcc test will end up
- # making a dummy file named `D' -- because `-MD' means `put the output
- # in D'.
- mkdir conftest.dir
- # Copy depcomp to subdir because otherwise we won't find it if we're
- # using a relative directory.
- cp "$am_depcomp" conftest.dir
- cd conftest.dir
- # We will build objects and dependencies in a subdirectory because
- # it helps to detect inapplicable dependency modes. For instance
- # both Tru64's cc and ICC support -MD to output dependencies as a
- # side effect of compilation, but ICC will put the dependencies in
- # the current directory while Tru64 will put them in the object
- # directory.
- mkdir sub
-
- am_cv_CXX_dependencies_compiler_type=none
- if test "$am_compiler_list" = ""; then
- am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
- fi
- am__universal=false
- case " $depcc " in #(
- *\ -arch\ *\ -arch\ *) am__universal=true ;;
- esac
-
- for depmode in $am_compiler_list; do
- # Setup a source with many dependencies, because some compilers
- # like to wrap large dependency lists on column 80 (with \), and
- # we should not choose a depcomp mode which is confused by this.
- #
- # We need to recreate these files for each test, as the compiler may
- # overwrite some of them when testing with obscure command lines.
- # This happens at least with the AIX C compiler.
- : > sub/conftest.c
- for i in 1 2 3 4 5 6; do
- echo '#include "conftst'$i'.h"' >> sub/conftest.c
- # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
- # Solaris 8's {/usr,}/bin/sh.
- touch sub/conftst$i.h
- done
- echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
-
- # We check with `-c' and `-o' for the sake of the "dashmstdout"
- # mode. It turns out that the SunPro C++ compiler does not properly
- # handle `-M -o', and we need to detect this. Also, some Intel
- # versions had trouble with output in subdirs
- am__obj=sub/conftest.${OBJEXT-o}
- am__minus_obj="-o $am__obj"
- case $depmode in
- gcc)
- # This depmode causes a compiler race in universal mode.
- test "$am__universal" = false || continue
- ;;
- nosideeffect)
- # after this tag, mechanisms are not by side-effect, so they'll
- # only be used when explicitly requested
- if test "x$enable_dependency_tracking" = xyes; then
- continue
- else
- break
- fi
- ;;
- msvisualcpp | msvcmsys)
- # This compiler won't grok `-c -o', but also, the minuso test has
- # not run yet. These depmodes are late enough in the game, and
- # so weak that their functioning should not be impacted.
- am__obj=conftest.${OBJEXT-o}
- am__minus_obj=
- ;;
- none) break ;;
- esac
- if depmode=$depmode \
- source=sub/conftest.c object=$am__obj \
- depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
- $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
- >/dev/null 2>conftest.err &&
- grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
- grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
- grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
- ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
- # icc doesn't choke on unknown options, it will just issue warnings
- # or remarks (even with -Werror). So we grep stderr for any message
- # that says an option was ignored or not supported.
- # When given -MP, icc 7.0 and 7.1 complain thusly:
- # icc: Command line warning: ignoring option '-M'; no argument required
- # The diagnosis changed in icc 8.0:
- # icc: Command line remark: option '-MP' not supported
- if (grep 'ignoring option' conftest.err ||
- grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
- am_cv_CXX_dependencies_compiler_type=$depmode
- break
- fi
- fi
- done
-
- cd ..
- rm -rf conftest.dir
-else
- am_cv_CXX_dependencies_compiler_type=none
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5
-$as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; }
-CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type
-
- if
- test "x$enable_dependency_tracking" != xno \
- && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then
- am__fastdepCXX_TRUE=
- am__fastdepCXX_FALSE='#'
-else
- am__fastdepCXX_TRUE='#'
- am__fastdepCXX_FALSE=
-fi
-
-
-if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
- ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
- (test "X$CXX" != "Xg++"))) ; then
- ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5
-$as_echo_n "checking how to run the C++ preprocessor... " >&6; }
-if test -z "$CXXCPP"; then
- if ${ac_cv_prog_CXXCPP+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- # Double quotes because CXXCPP needs to be expanded
- for CXXCPP in "$CXX -E" "/lib/cpp"
- do
- ac_preproc_ok=false
-for ac_cxx_preproc_warn_flag in '' yes
-do
- # Use a header file that comes with gcc, so configuring glibc
- # with a fresh cross-compiler works.
- # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
- # <limits.h> exists even on freestanding compilers.
- # On the NeXT, cc -E runs the code through the compiler's parser,
- # not just through cpp. "Syntax error" is here to catch this case.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
- Syntax error
-_ACEOF
-if ac_fn_cxx_try_cpp "$LINENO"; then :
-
-else
- # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
- # OK, works on sane cases. Now check whether nonexistent headers
- # can be detected and how.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_cxx_try_cpp "$LINENO"; then :
- # Broken: success on invalid input.
-continue
-else
- # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
- break
-fi
-
- done
- ac_cv_prog_CXXCPP=$CXXCPP
-
-fi
- CXXCPP=$ac_cv_prog_CXXCPP
-else
- ac_cv_prog_CXXCPP=$CXXCPP
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5
-$as_echo "$CXXCPP" >&6; }
-ac_preproc_ok=false
-for ac_cxx_preproc_warn_flag in '' yes
-do
- # Use a header file that comes with gcc, so configuring glibc
- # with a fresh cross-compiler works.
- # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
- # <limits.h> exists even on freestanding compilers.
- # On the NeXT, cc -E runs the code through the compiler's parser,
- # not just through cpp. "Syntax error" is here to catch this case.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
- Syntax error
-_ACEOF
-if ac_fn_cxx_try_cpp "$LINENO"; then :
-
-else
- # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
- # OK, works on sane cases. Now check whether nonexistent headers
- # can be detected and how.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_cxx_try_cpp "$LINENO"; then :
- # Broken: success on invalid input.
-continue
-else
- # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
-
-else
- { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-_lt_caught_CXX_error=yes; }
-fi
-
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-else
- _lt_caught_CXX_error=yes
-fi
-
-
-
-
-
-# Set options
-
-
-
- enable_dlopen=no
-
-
- enable_win32_dll=no
-
-
- # Check whether --enable-shared was given.
-if test "${enable_shared+set}" = set; then :
- enableval=$enable_shared; p=${PACKAGE-default}
- case $enableval in
- yes) enable_shared=yes ;;
- no) enable_shared=no ;;
- *)
- enable_shared=no
- # Look at the argument we got. We use all the common list separators.
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
- for pkg in $enableval; do
- IFS="$lt_save_ifs"
- if test "X$pkg" = "X$p"; then
- enable_shared=yes
- fi
- done
- IFS="$lt_save_ifs"
- ;;
- esac
-else
- enable_shared=yes
-fi
-
-
-
-
-
-
-
-
-
- # Check whether --enable-static was given.
-if test "${enable_static+set}" = set; then :
- enableval=$enable_static; p=${PACKAGE-default}
- case $enableval in
- yes) enable_static=yes ;;
- no) enable_static=no ;;
- *)
- enable_static=no
- # Look at the argument we got. We use all the common list separators.
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
- for pkg in $enableval; do
- IFS="$lt_save_ifs"
- if test "X$pkg" = "X$p"; then
- enable_static=yes
- fi
- done
- IFS="$lt_save_ifs"
- ;;
- esac
-else
- enable_static=yes
-fi
-
-
-
-
-
-
-
-
-
-
-# Check whether --with-pic was given.
-if test "${with_pic+set}" = set; then :
- withval=$with_pic; pic_mode="$withval"
-else
- pic_mode=default
-fi
-
-
-test -z "$pic_mode" && pic_mode=default
-
-
-
-
-
-
-
- # Check whether --enable-fast-install was given.
-if test "${enable_fast_install+set}" = set; then :
- enableval=$enable_fast_install; p=${PACKAGE-default}
- case $enableval in
- yes) enable_fast_install=yes ;;
- no) enable_fast_install=no ;;
- *)
- enable_fast_install=no
- # Look at the argument we got. We use all the common list separators.
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
- for pkg in $enableval; do
- IFS="$lt_save_ifs"
- if test "X$pkg" = "X$p"; then
- enable_fast_install=yes
- fi
- done
- IFS="$lt_save_ifs"
- ;;
- esac
-else
- enable_fast_install=yes
-fi
-
-
-
-
-
-
-
-
-
-
-
-# This can be used to rebuild libtool when needed
-LIBTOOL_DEPS="$ltmain"
-
-# Always use our own libtool.
-LIBTOOL='$(SHELL) $(top_builddir)/libtool'
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-test -z "$LN_S" && LN_S="ln -s"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if test -n "${ZSH_VERSION+set}" ; then
- setopt NO_GLOB_SUBST
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5
-$as_echo_n "checking for objdir... " >&6; }
-if ${lt_cv_objdir+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- rm -f .libs 2>/dev/null
-mkdir .libs 2>/dev/null
-if test -d .libs; then
- lt_cv_objdir=.libs
-else
- # MS-DOS does not allow filenames that begin with a dot.
- lt_cv_objdir=_libs
-fi
-rmdir .libs 2>/dev/null
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5
-$as_echo "$lt_cv_objdir" >&6; }
-objdir=$lt_cv_objdir
-
-
-
-
-
-cat >>confdefs.h <<_ACEOF
-#define LT_OBJDIR "$lt_cv_objdir/"
-_ACEOF
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-case $host_os in
-aix3*)
- # AIX sometimes has problems with the GCC collect2 program. For some
- # reason, if we set the COLLECT_NAMES environment variable, the problems
- # vanish in a puff of smoke.
- if test "X${COLLECT_NAMES+set}" != Xset; then
- COLLECT_NAMES=
- export COLLECT_NAMES
- fi
- ;;
-esac
-
-# Sed substitution that helps us do robust quoting. It backslashifies
-# metacharacters that are still active within double-quoted strings.
-sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
-
-# Same as above, but do not quote variable references.
-double_quote_subst='s/\(["`\\]\)/\\\1/g'
-
-# Sed substitution to delay expansion of an escaped shell variable in a
-# double_quote_subst'ed string.
-delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
-
-# Sed substitution to delay expansion of an escaped single quote.
-delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
-
-# Sed substitution to avoid accidental globbing in evaled expressions
-no_glob_subst='s/\*/\\\*/g'
-
-# Global variables:
-ofile=libtool
-can_build_shared=yes
-
-# All known linkers require a `.a' archive for static linking (except MSVC,
-# which needs '.lib').
-libext=a
-
-with_gnu_ld="$lt_cv_prog_gnu_ld"
-
-old_CC="$CC"
-old_CFLAGS="$CFLAGS"
-
-# Set sane defaults for various variables
-test -z "$CC" && CC=cc
-test -z "$LTCC" && LTCC=$CC
-test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
-test -z "$LD" && LD=ld
-test -z "$ac_objext" && ac_objext=o
-
-for cc_temp in $compiler""; do
- case $cc_temp in
- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
- \-*) ;;
- *) break;;
- esac
-done
-cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"`
-
-
-# Only perform the check for file, if the check method requires it
-test -z "$MAGIC_CMD" && MAGIC_CMD=file
-case $deplibs_check_method in
-file_magic*)
- if test "$file_magic_cmd" = '$MAGIC_CMD'; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5
-$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; }
-if ${lt_cv_path_MAGIC_CMD+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- case $MAGIC_CMD in
-[\\/*] | ?:[\\/]*)
- lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
- ;;
-*)
- lt_save_MAGIC_CMD="$MAGIC_CMD"
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
- for ac_dir in $ac_dummy; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f $ac_dir/${ac_tool_prefix}file; then
- lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file"
- if test -n "$file_magic_test_file"; then
- case $deplibs_check_method in
- "file_magic "*)
- file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
- MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
- if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
- $EGREP "$file_magic_regex" > /dev/null; then
- :
- else
- cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such. This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem. Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
- fi ;;
- esac
- fi
- break
- fi
- done
- IFS="$lt_save_ifs"
- MAGIC_CMD="$lt_save_MAGIC_CMD"
- ;;
-esac
-fi
-
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
-$as_echo "$MAGIC_CMD" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-
-
-
-if test -z "$lt_cv_path_MAGIC_CMD"; then
- if test -n "$ac_tool_prefix"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5
-$as_echo_n "checking for file... " >&6; }
-if ${lt_cv_path_MAGIC_CMD+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- case $MAGIC_CMD in
-[\\/*] | ?:[\\/]*)
- lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
- ;;
-*)
- lt_save_MAGIC_CMD="$MAGIC_CMD"
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
- for ac_dir in $ac_dummy; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f $ac_dir/file; then
- lt_cv_path_MAGIC_CMD="$ac_dir/file"
- if test -n "$file_magic_test_file"; then
- case $deplibs_check_method in
- "file_magic "*)
- file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
- MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
- if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
- $EGREP "$file_magic_regex" > /dev/null; then
- :
- else
- cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such. This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem. Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
- fi ;;
- esac
- fi
- break
- fi
- done
- IFS="$lt_save_ifs"
- MAGIC_CMD="$lt_save_MAGIC_CMD"
- ;;
-esac
-fi
-
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
-$as_echo "$MAGIC_CMD" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
- else
- MAGIC_CMD=:
- fi
-fi
-
- fi
- ;;
-esac
-
-# Use C for the default configuration in the libtool script
-
-lt_save_CC="$CC"
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-# Source file extension for C test sources.
-ac_ext=c
-
-# Object file extension for compiled C test sources.
-objext=o
-objext=$objext
-
-# Code to be used in simple compile tests
-lt_simple_compile_test_code="int some_variable = 0;"
-
-# Code to be used in simple link tests
-lt_simple_link_test_code='int main(){return(0);}'
-
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-# Save the default compiler, since it gets overwritten when the other
-# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
-compiler_DEFAULT=$CC
-
-# save warnings/boilerplate of simple test code
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-
-
-## CAVEAT EMPTOR:
-## There is no encapsulation within the following macros, do not change
-## the running order or otherwise move them around unless you know exactly
-## what you are doing...
-if test -n "$compiler"; then
-
-lt_prog_compiler_no_builtin_flag=
-
-if test "$GCC" = yes; then
- lt_prog_compiler_no_builtin_flag=' -fno-builtin'
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5
-$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; }
-if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_rtti_exceptions=no
- ac_outfile=conftest.$ac_objext
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
- lt_compiler_flag="-fno-rtti -fno-exceptions"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- # The option is referenced via a variable to avoid confusing sed.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:8346: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>conftest.err)
- ac_status=$?
- cat conftest.err >&5
- echo "$as_me:8350: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s "$ac_outfile"; then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings other than the usual output.
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_rtti_exceptions=yes
- fi
- fi
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5
-$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; }
-
-if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then
- lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"
-else
- :
-fi
-
-fi
-
-
-
-
-
-
- lt_prog_compiler_wl=
-lt_prog_compiler_pic=
-lt_prog_compiler_static=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_static='-static'
-
- case $host_os in
- aix*)
- # All AIX code is PIC.
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- lt_prog_compiler_static='-Bstatic'
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- lt_prog_compiler_pic='-fPIC'
- ;;
- m68k)
- # FIXME: we need at least 68020 code to build shared libraries, but
- # adding the `-m68020' flag to GCC prevents building anything better,
- # like `-m68040'.
- lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4'
- ;;
- esac
- ;;
-
- beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
- # PIC is the default for these OSes.
- ;;
-
- mingw* | cygwin* | pw32* | os2* | cegcc*)
- # This hack is so that the source file can tell whether it is being
- # built for inclusion in a dll (and should export symbols for example).
- # Although the cygwin gcc ignores -fPIC, still need this for old-style
- # (--disable-auto-import) libraries
- lt_prog_compiler_pic='-DDLL_EXPORT'
- ;;
-
- darwin* | rhapsody*)
- # PIC is the default on this platform
- # Common symbols not allowed in MH_DYLIB files
- lt_prog_compiler_pic='-fno-common'
- ;;
-
- hpux*)
- # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
- # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
- # sets the default TLS model and affects inlining.
- case $host_cpu in
- hppa*64*)
- # +Z the default
- ;;
- *)
- lt_prog_compiler_pic='-fPIC'
- ;;
- esac
- ;;
-
- interix[3-9]*)
- # Interix 3.x gcc -fpic/-fPIC options generate broken code.
- # Instead, we relocate shared libraries at runtime.
- ;;
-
- msdosdjgpp*)
- # Just because we use GCC doesn't mean we suddenly get shared libraries
- # on systems that don't support them.
- lt_prog_compiler_can_build_shared=no
- enable_shared=no
- ;;
-
- *nto* | *qnx*)
- # QNX uses GNU C++, but need to define -shared option too, otherwise
- # it will coredump.
- lt_prog_compiler_pic='-fPIC -shared'
- ;;
-
- sysv4*MP*)
- if test -d /usr/nec; then
- lt_prog_compiler_pic=-Kconform_pic
- fi
- ;;
-
- *)
- lt_prog_compiler_pic='-fPIC'
- ;;
- esac
- else
- # PORTME Check for flag to pass linker flags through the system compiler.
- case $host_os in
- aix*)
- lt_prog_compiler_wl='-Wl,'
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- lt_prog_compiler_static='-Bstatic'
- else
- lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp'
- fi
- ;;
-
- mingw* | cygwin* | pw32* | os2* | cegcc*)
- # This hack is so that the source file can tell whether it is being
- # built for inclusion in a dll (and should export symbols for example).
- lt_prog_compiler_pic='-DDLL_EXPORT'
- ;;
-
- hpux9* | hpux10* | hpux11*)
- lt_prog_compiler_wl='-Wl,'
- # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
- # not for PA HP-UX.
- case $host_cpu in
- hppa*64*|ia64*)
- # +Z the default
- ;;
- *)
- lt_prog_compiler_pic='+Z'
- ;;
- esac
- # Is there a better lt_prog_compiler_static that works with the bundled CC?
- lt_prog_compiler_static='${wl}-a ${wl}archive'
- ;;
-
- irix5* | irix6* | nonstopux*)
- lt_prog_compiler_wl='-Wl,'
- # PIC (with -KPIC) is the default.
- lt_prog_compiler_static='-non_shared'
- ;;
-
- linux* | k*bsd*-gnu)
- case $cc_basename in
- # old Intel for x86_64 which still supported -KPIC.
- ecc*)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-static'
- ;;
- # icc used to be incompatible with GCC.
- # ICC 10 doesn't accept -KPIC any more.
- icc* | ifort*)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='-fPIC'
- lt_prog_compiler_static='-static'
- ;;
- # Lahey Fortran 8.1.
- lf95*)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
- pgcc* | pgf77* | pgf90* | pgf95*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='-fpic'
- lt_prog_compiler_static='-Bstatic'
- ;;
- ccc*)
- lt_prog_compiler_wl='-Wl,'
- # All Alpha code is PIC.
- lt_prog_compiler_static='-non_shared'
- ;;
- xl*)
- # IBM XL C 8.0/Fortran 10.1 on PPC
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='-qpic'
- lt_prog_compiler_static='-qstaticlink'
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C 5.9
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- lt_prog_compiler_wl='-Wl,'
- ;;
- *Sun\ F*)
- # Sun Fortran 8.3 passes all unrecognized flags to the linker
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- lt_prog_compiler_wl=''
- ;;
- esac
- ;;
- esac
- ;;
-
- newsos6)
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- ;;
-
- *nto* | *qnx*)
- # QNX uses GNU C++, but need to define -shared option too, otherwise
- # it will coredump.
- lt_prog_compiler_pic='-fPIC -shared'
- ;;
-
- osf3* | osf4* | osf5*)
- lt_prog_compiler_wl='-Wl,'
- # All OSF/1 code is PIC.
- lt_prog_compiler_static='-non_shared'
- ;;
-
- rdos*)
- lt_prog_compiler_static='-non_shared'
- ;;
-
- solaris*)
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
- f77* | f90* | f95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
- esac
- ;;
-
- sunos4*)
- lt_prog_compiler_wl='-Qoption ld '
- lt_prog_compiler_pic='-PIC'
- lt_prog_compiler_static='-Bstatic'
- ;;
-
- sysv4 | sysv4.2uw2* | sysv4.3*)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- ;;
-
- sysv4*MP*)
- if test -d /usr/nec ;then
- lt_prog_compiler_pic='-Kconform_pic'
- lt_prog_compiler_static='-Bstatic'
- fi
- ;;
-
- sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- ;;
-
- unicos*)
- lt_prog_compiler_wl='-Wl,'
- lt_prog_compiler_can_build_shared=no
- ;;
-
- uts4*)
- lt_prog_compiler_pic='-pic'
- lt_prog_compiler_static='-Bstatic'
- ;;
-
- *)
- lt_prog_compiler_can_build_shared=no
- ;;
- esac
- fi
-
-case $host_os in
- # For platforms which do not support PIC, -DPIC is meaningless:
- *djgpp*)
- lt_prog_compiler_pic=
- ;;
- *)
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
-$as_echo "$lt_prog_compiler_pic" >&6; }
-
-
-
-
-
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$lt_prog_compiler_pic"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5
-$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; }
-if ${lt_cv_prog_compiler_pic_works+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_pic_works=no
- ac_outfile=conftest.$ac_objext
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
- lt_compiler_flag="$lt_prog_compiler_pic -DPIC"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- # The option is referenced via a variable to avoid confusing sed.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:8685: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>conftest.err)
- ac_status=$?
- cat conftest.err >&5
- echo "$as_me:8689: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s "$ac_outfile"; then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings other than the usual output.
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_pic_works=yes
- fi
- fi
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5
-$as_echo "$lt_cv_prog_compiler_pic_works" >&6; }
-
-if test x"$lt_cv_prog_compiler_pic_works" = xyes; then
- case $lt_prog_compiler_pic in
- "" | " "*) ;;
- *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;;
- esac
-else
- lt_prog_compiler_pic=
- lt_prog_compiler_can_build_shared=no
-fi
-
-fi
-
-
-
-
-
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
-$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
-if ${lt_cv_prog_compiler_static_works+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_static_works=no
- save_LDFLAGS="$LDFLAGS"
- LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
- echo "$lt_simple_link_test_code" > conftest.$ac_ext
- if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
- # The linker can only warn and ignore the option if not recognized
- # So say no if there are warnings
- if test -s conftest.err; then
- # Append any errors to the config.log.
- cat conftest.err 1>&5
- $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if diff conftest.exp conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_static_works=yes
- fi
- else
- lt_cv_prog_compiler_static_works=yes
- fi
- fi
- $RM -r conftest*
- LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5
-$as_echo "$lt_cv_prog_compiler_static_works" >&6; }
-
-if test x"$lt_cv_prog_compiler_static_works" = xyes; then
- :
-else
- lt_prog_compiler_static=
-fi
-
-
-
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_c_o=no
- $RM -r conftest 2>/dev/null
- mkdir conftest
- cd conftest
- mkdir out
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- lt_compiler_flag="-o out/conftest2.$ac_objext"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:8790: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>out/conftest.err)
- ac_status=$?
- cat out/conftest.err >&5
- echo "$as_me:8794: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s out/conftest2.$ac_objext
- then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp
- $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
- if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_c_o=yes
- fi
- fi
- chmod u+w . 2>&5
- $RM conftest*
- # SGI C++ compiler will create directory out/ii_files/ for
- # template instantiation
- test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
- $RM out/* && rmdir out
- cd ..
- $RM -r conftest
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
-$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
-
-
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_c_o=no
- $RM -r conftest 2>/dev/null
- mkdir conftest
- cd conftest
- mkdir out
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- lt_compiler_flag="-o out/conftest2.$ac_objext"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:8845: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>out/conftest.err)
- ac_status=$?
- cat out/conftest.err >&5
- echo "$as_me:8849: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s out/conftest2.$ac_objext
- then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp
- $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
- if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_c_o=yes
- fi
- fi
- chmod u+w . 2>&5
- $RM conftest*
- # SGI C++ compiler will create directory out/ii_files/ for
- # template instantiation
- test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
- $RM out/* && rmdir out
- cd ..
- $RM -r conftest
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
-$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
-
-
-
-
-hard_links="nottested"
-if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then
- # do not overwrite the value of need_locks provided by the user
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
-$as_echo_n "checking if we can lock with hard links... " >&6; }
- hard_links=yes
- $RM conftest*
- ln conftest.a conftest.b 2>/dev/null && hard_links=no
- touch conftest.a
- ln conftest.a conftest.b 2>&5 || hard_links=no
- ln conftest.a conftest.b 2>/dev/null && hard_links=no
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
-$as_echo "$hard_links" >&6; }
- if test "$hard_links" = no; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
-$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
- need_locks=warn
- fi
-else
- need_locks=no
-fi
-
-
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
- runpath_var=
- allow_undefined_flag=
- always_export_symbols=no
- archive_cmds=
- archive_expsym_cmds=
- compiler_needs_object=no
- enable_shared_with_static_runtimes=no
- export_dynamic_flag_spec=
- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
- hardcode_automatic=no
- hardcode_direct=no
- hardcode_direct_absolute=no
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld=
- hardcode_libdir_separator=
- hardcode_minus_L=no
- hardcode_shlibpath_var=unsupported
- inherit_rpath=no
- link_all_deplibs=unknown
- module_cmds=
- module_expsym_cmds=
- old_archive_from_new_cmds=
- old_archive_from_expsyms_cmds=
- thread_safe_flag_spec=
- whole_archive_flag_spec=
- # include_expsyms should be a list of space-separated symbols to be *always*
- # included in the symbol list
- include_expsyms=
- # exclude_expsyms can be an extended regexp of symbols to exclude
- # it will be wrapped by ` (' and `)$', so one must not match beginning or
- # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
- # as well as any symbol that contains `d'.
- exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
- # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
- # platforms (ab)use it in PIC code, but their linkers get confused if
- # the symbol is explicitly referenced. Since portable code cannot
- # rely on this symbol name, it's probably fine to never include it in
- # preloaded symbol tables.
- # Exclude shared library initialization/finalization symbols.
- extract_expsyms_cmds=
-
- case $host_os in
- cygwin* | mingw* | pw32* | cegcc*)
- # FIXME: the MSVC++ port hasn't been tested in a loooong time
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- if test "$GCC" != yes; then
- with_gnu_ld=no
- fi
- ;;
- interix*)
- # we just hope/assume this is gcc and not c89 (= MSVC++)
- with_gnu_ld=yes
- ;;
- openbsd*)
- with_gnu_ld=no
- ;;
- esac
-
- ld_shlibs=yes
- if test "$with_gnu_ld" = yes; then
- # If archive_cmds runs LD, not CC, wlarc should be empty
- wlarc='${wl}'
-
- # Set some defaults for GNU ld with shared library support. These
- # are reset later if shared libraries are not supported. Putting them
- # here allows them to be overridden if necessary.
- runpath_var=LD_RUN_PATH
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- export_dynamic_flag_spec='${wl}--export-dynamic'
- # ancient GNU ld didn't support --whole-archive et. al.
- if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
- whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
- else
- whole_archive_flag_spec=
- fi
- supports_anon_versioning=no
- case `$LD -v 2>&1` in
- *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
- *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
- *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
- *\ 2.11.*) ;; # other 2.11 versions
- *) supports_anon_versioning=yes ;;
- esac
-
- # See if GNU ld supports shared libraries.
- case $host_os in
- aix[3-9]*)
- # On AIX/PPC, the GNU linker is very broken
- if test "$host_cpu" != ia64; then
- ld_shlibs=no
- cat <<_LT_EOF 1>&2
-
-*** Warning: the GNU linker, at least up to release 2.9.1, is reported
-*** to be unable to reliably create shared libraries on AIX.
-*** Therefore, libtool is disabling shared libraries support. If you
-*** really care for shared libraries, you may want to modify your PATH
-*** so that a non-GNU linker is found, and then restart.
-
-_LT_EOF
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds=''
- ;;
- m68k)
- archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_minus_L=yes
- ;;
- esac
- ;;
-
- beos*)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- allow_undefined_flag=unsupported
- # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
- # support --undefined. This deserves some investigation. FIXME
- archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- else
- ld_shlibs=no
- fi
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
- # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless,
- # as there is no search path for DLLs.
- hardcode_libdir_flag_spec='-L$libdir'
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- # If the export-symbols file already is a .def file (1st line
- # is EXPORTS), use it as is; otherwise, prepend...
- archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
- cp $export_symbols $output_objdir/$soname.def;
- else
- echo EXPORTS > $output_objdir/$soname.def;
- cat $export_symbols >> $output_objdir/$soname.def;
- fi~
- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- else
- ld_shlibs=no
- fi
- ;;
-
- interix[3-9]*)
- hardcode_direct=no
- hardcode_shlibpath_var=no
- hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
- export_dynamic_flag_spec='${wl}-E'
- # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
- # Instead, shared libraries are loaded at an image base (0x10000000 by
- # default) and relocated if they conflict, which is a slow very memory
- # consuming and fragmenting process. To avoid this, we pick a random,
- # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
- # time. Moving up from 0x10000000 also allows more sbrk(2) space.
- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- ;;
-
- gnu* | linux* | tpf* | k*bsd*-gnu)
- tmp_diet=no
- if test "$host_os" = linux-dietlibc; then
- case $cc_basename in
- diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn)
- esac
- fi
- if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
- && test "$tmp_diet" = no
- then
- tmp_addflag=
- tmp_sharedflag='-shared'
- case $cc_basename,$host_cpu in
- pgcc*) # Portland Group C compiler
- whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- tmp_addflag=' $pic_flag'
- ;;
- pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers
- whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- tmp_addflag=' $pic_flag -Mnomain' ;;
- ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64
- tmp_addflag=' -i_dynamic' ;;
- efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64
- tmp_addflag=' -i_dynamic -nofor_main' ;;
- ifc* | ifort*) # Intel Fortran compiler
- tmp_addflag=' -nofor_main' ;;
- lf95*) # Lahey Fortran 8.1
- whole_archive_flag_spec=
- tmp_sharedflag='--shared' ;;
- xl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
- tmp_sharedflag='-qmkshrobj'
- tmp_addflag= ;;
- esac
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*) # Sun C 5.9
- whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- compiler_needs_object=yes
- tmp_sharedflag='-G' ;;
- *Sun\ F*) # Sun Fortran 8.3
- tmp_sharedflag='-G' ;;
- esac
- archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
- fi
-
- case $cc_basename in
- xlf*)
- # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
- else
- ld_shlibs=no
- fi
- ;;
-
- netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
- solaris*)
- if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
- ld_shlibs=no
- cat <<_LT_EOF 1>&2
-
-*** Warning: The releases 2.8.* of the GNU linker cannot reliably
-*** create shared libraries on Solaris systems. Therefore, libtool
-*** is disabling shared libraries support. We urge you to upgrade GNU
-*** binutils to release 2.9.1 or newer. Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
- ;;
-
- sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
- case `$LD -v 2>&1` in
- *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
- ld_shlibs=no
- cat <<_LT_EOF 1>&2
-
-*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
-*** reliably create shared libraries on SCO systems. Therefore, libtool
-*** is disabling shared libraries support. We urge you to upgrade GNU
-*** binutils to release 2.16.91.0.3 or newer. Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
- ;;
- *)
- # For security reasons, it is highly recommended that you always
- # use absolute paths for naming shared libraries, and exclude the
- # DT_RUNPATH tag from executables and libraries. But doing so
- # requires that you compile everything twice, which is a pain.
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
- ;;
- esac
- ;;
-
- sunos4*)
- archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
- wlarc=
- hardcode_direct=yes
- hardcode_shlibpath_var=no
- ;;
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
- ;;
- esac
-
- if test "$ld_shlibs" = no; then
- runpath_var=
- hardcode_libdir_flag_spec=
- export_dynamic_flag_spec=
- whole_archive_flag_spec=
- fi
- else
- # PORTME fill in a description of your system's linker (not GNU ld)
- case $host_os in
- aix3*)
- allow_undefined_flag=unsupported
- always_export_symbols=yes
- archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
- # Note: this linker hardcodes the directories in LIBPATH if there
- # are no directories specified by -L.
- hardcode_minus_L=yes
- if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
- # Neither direct hardcoding nor static linking is supported with a
- # broken collect2.
- hardcode_direct=unsupported
- fi
- ;;
-
- aix[4-9]*)
- if test "$host_cpu" = ia64; then
- # On IA64, the linker does run time linking by default, so we don't
- # have to do anything special.
- aix_use_runtimelinking=no
- exp_sym_flag='-Bexport'
- no_entry_flag=""
- else
- # If we're using GNU nm, then we don't want the "-C" option.
- # -C means demangle to AIX nm, but means don't demangle with GNU nm
- if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
- export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- else
- export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- fi
- aix_use_runtimelinking=no
-
- # Test if we are trying to use run time linking or normal
- # AIX style linking. If -brtl is somewhere in LDFLAGS, we
- # need to do runtime linking.
- case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
- for ld_flag in $LDFLAGS; do
- if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
- aix_use_runtimelinking=yes
- break
- fi
- done
- ;;
- esac
-
- exp_sym_flag='-bexport'
- no_entry_flag='-bnoentry'
- fi
-
- # When large executables or shared objects are built, AIX ld can
- # have problems creating the table of contents. If linking a library
- # or program results in "error TOC overflow" add -mminimal-toc to
- # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
- # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
- archive_cmds=''
- hardcode_direct=yes
- hardcode_direct_absolute=yes
- hardcode_libdir_separator=':'
- link_all_deplibs=yes
- file_list_spec='${wl}-f,'
-
- if test "$GCC" = yes; then
- case $host_os in aix4.[012]|aix4.[012].*)
- # We only want to do this on AIX 4.2 and lower, the check
- # below for broken collect2 doesn't work under 4.3+
- collect2name=`${CC} -print-prog-name=collect2`
- if test -f "$collect2name" &&
- strings "$collect2name" | $GREP resolve_lib_name >/dev/null
- then
- # We have reworked collect2
- :
- else
- # We have old collect2
- hardcode_direct=unsupported
- # It fails to find uninstalled libraries when the uninstalled
- # path is not listed in the libpath. Setting hardcode_minus_L
- # to unsupported forces relinking
- hardcode_minus_L=yes
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_libdir_separator=
- fi
- ;;
- esac
- shared_flag='-shared'
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag="$shared_flag "'${wl}-G'
- fi
- else
- # not using gcc
- if test "$host_cpu" = ia64; then
- # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
- # chokes on -Wl,-G. The following line is correct:
- shared_flag='-G'
- else
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag='${wl}-G'
- else
- shared_flag='${wl}-bM:SRE'
- fi
- fi
- fi
-
- export_dynamic_flag_spec='${wl}-bexpall'
- # It seems that -bexpall does not export symbols beginning with
- # underscore (_), so it is better to generate a list of symbols to export.
- always_export_symbols=yes
- if test "$aix_use_runtimelinking" = yes; then
- # Warning - without using the other runtime loading flags (-brtl),
- # -berok will link without error, but may produce a broken library.
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-
-lt_aix_libpath_sed='
- /Import File Strings/,/^$/ {
- /^0/ {
- s/^0 *\(.*\)$/\1/
- p
- }
- }'
-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-# Check for a 64-bit object if we didn't find anything.
-if test -z "$aix_libpath"; then
- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
- else
- if test "$host_cpu" = ia64; then
- hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
- allow_undefined_flag="-z nodefs"
- archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-
-lt_aix_libpath_sed='
- /Import File Strings/,/^$/ {
- /^0/ {
- s/^0 *\(.*\)$/\1/
- p
- }
- }'
-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-# Check for a 64-bit object if we didn't find anything.
-if test -z "$aix_libpath"; then
- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
- # -berok will link without error, but may produce a broken library.
- no_undefined_flag=' ${wl}-bernotok'
- allow_undefined_flag=' ${wl}-berok'
- # Exported symbols can be pulled into shared objects from archives
- whole_archive_flag_spec='$convenience'
- archive_cmds_need_lc=yes
- # This is similar to how AIX traditionally builds its shared libraries.
- archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
- fi
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds=''
- ;;
- m68k)
- archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_minus_L=yes
- ;;
- esac
- ;;
-
- bsdi[45]*)
- export_dynamic_flag_spec=-rdynamic
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
- hardcode_libdir_flag_spec=' '
- allow_undefined_flag=unsupported
- # Tell ltmain to make .lib files, not .a files.
- libext=lib
- # Tell ltmain to make .dll files, not .so files.
- shrext_cmds=".dll"
- # FIXME: Setting linknames here is a bad hack.
- archive_cmds='$CC -o $lib $libobjs $compiler_flags `$ECHO "X$deplibs" | $Xsed -e '\''s/ -lc$//'\''` -link -dll~linknames='
- # The linker will automatically build a .lib file if we build a DLL.
- old_archive_from_new_cmds='true'
- # FIXME: Should let the user specify the lib program.
- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
- fix_srcfile_path='`cygpath -w "$srcfile"`'
- enable_shared_with_static_runtimes=yes
- ;;
-
- darwin* | rhapsody*)
-
-
- archive_cmds_need_lc=no
- hardcode_direct=no
- hardcode_automatic=yes
- hardcode_shlibpath_var=unsupported
- whole_archive_flag_spec=''
- link_all_deplibs=yes
- allow_undefined_flag="$_lt_dar_allow_undefined"
- case $cc_basename in
- ifort*) _lt_dar_can_shared=yes ;;
- *) _lt_dar_can_shared=$GCC ;;
- esac
- if test "$_lt_dar_can_shared" = "yes"; then
- output_verbose_link_cmd=echo
- archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
- module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
- archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
- module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
-
- else
- ld_shlibs=no
- fi
-
- ;;
-
- dgux*)
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_shlibpath_var=no
- ;;
-
- freebsd1*)
- ld_shlibs=no
- ;;
-
- # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
- # support. Future versions do this automatically, but an explicit c++rt0.o
- # does not break anything, and helps significantly (at the cost of a little
- # extra space).
- freebsd2.2*)
- archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
- ;;
-
- # Unfortunately, older versions of FreeBSD 2 do not have this feature.
- freebsd2*)
- archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
- hardcode_direct=yes
- hardcode_minus_L=yes
- hardcode_shlibpath_var=no
- ;;
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
- ;;
-
- hpux9*)
- if test "$GCC" = yes; then
- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
- hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator=:
- hardcode_direct=yes
-
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- hardcode_minus_L=yes
- export_dynamic_flag_spec='${wl}-E'
- ;;
-
- hpux10*)
- if test "$GCC" = yes -a "$with_gnu_ld" = no; then
- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
- if test "$with_gnu_ld" = no; then
- hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
- hardcode_libdir_flag_spec_ld='+b $libdir'
- hardcode_libdir_separator=:
- hardcode_direct=yes
- hardcode_direct_absolute=yes
- export_dynamic_flag_spec='${wl}-E'
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- hardcode_minus_L=yes
- fi
- ;;
-
- hpux11*)
- if test "$GCC" = yes -a "$with_gnu_ld" = no; then
- case $host_cpu in
- hppa*64*)
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
- case $host_cpu in
- hppa*64*)
- archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
- archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- fi
- if test "$with_gnu_ld" = no; then
- hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator=:
-
- case $host_cpu in
- hppa*64*|ia64*)
- hardcode_direct=no
- hardcode_shlibpath_var=no
- ;;
- *)
- hardcode_direct=yes
- hardcode_direct_absolute=yes
- export_dynamic_flag_spec='${wl}-E'
-
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- hardcode_minus_L=yes
- ;;
- esac
- fi
- ;;
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
- save_LDFLAGS="$LDFLAGS"
- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-int foo(void) {}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
- LDFLAGS="$save_LDFLAGS"
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
- fi
- archive_cmds_need_lc='no'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator=:
- inherit_rpath=yes
- link_all_deplibs=yes
- ;;
-
- netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out
- else
- archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF
- fi
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
- ;;
-
- newsos6)
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_direct=yes
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator=:
- hardcode_shlibpath_var=no
- ;;
-
- *nto* | *qnx*)
- ;;
-
- openbsd*)
- if test -f /usr/libexec/ld.so; then
- hardcode_direct=yes
- hardcode_shlibpath_var=no
- hardcode_direct_absolute=yes
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
- hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
- export_dynamic_flag_spec='${wl}-E'
- else
- case $host_os in
- openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
- archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- ;;
- *)
- archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
- ;;
- esac
- fi
- else
- ld_shlibs=no
- fi
- ;;
-
- os2*)
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_minus_L=yes
- allow_undefined_flag=unsupported
- archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$ECHO DATA >> $output_objdir/$libname.def~$ECHO " SINGLE NONSHARED" >> $output_objdir/$libname.def~$ECHO EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
- old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
- ;;
-
- osf3*)
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- else
- allow_undefined_flag=' -expect_unresolved \*'
- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- fi
- archive_cmds_need_lc='no'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator=:
- ;;
-
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
- $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
-
- # Both c and cxx compiler support -rpath directly
- hardcode_libdir_flag_spec='-rpath $libdir'
- fi
- archive_cmds_need_lc='no'
- hardcode_libdir_separator=:
- ;;
-
- solaris*)
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
- wlarc=''
- archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
- ;;
- *)
- wlarc='${wl}'
- archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- ;;
- esac
- fi
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_shlibpath_var=no
- case $host_os in
- solaris2.[0-5] | solaris2.[0-5].*) ;;
- *)
- # The compiler driver will combine and reorder linker options,
- # but understands `-z linker_flag'. GCC discards it without `$wl',
- # but is careful enough not to reorder.
- # Supported since Solaris 2.6 (maybe 2.5.1?)
- if test "$GCC" = yes; then
- whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
- else
- whole_archive_flag_spec='-z allextract$convenience -z defaultextract'
- fi
- ;;
- esac
- link_all_deplibs=yes
- ;;
-
- sunos4*)
- if test "x$host_vendor" = xsequent; then
- # Use $CC to link under sequent, because it throws in some extra .o
- # files that make .init and .fini sections work.
- archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
- fi
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_direct=yes
- hardcode_minus_L=yes
- hardcode_shlibpath_var=no
- ;;
-
- sysv4)
- case $host_vendor in
- sni)
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_direct=yes # is this really true???
- ;;
- siemens)
- ## LD is ld it makes a PLAMLIB
- ## CC just makes a GrossModule.
- archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags'
- reload_cmds='$CC -r -o $output$reload_objs'
- hardcode_direct=no
- ;;
- motorola)
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_direct=no #Motorola manual says yes, but my tests say they lie
- ;;
- esac
- runpath_var='LD_RUN_PATH'
- hardcode_shlibpath_var=no
- ;;
-
- sysv4.3*)
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_shlibpath_var=no
- export_dynamic_flag_spec='-Bexport'
- ;;
-
- sysv4*MP*)
- if test -d /usr/nec; then
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_shlibpath_var=no
- runpath_var=LD_RUN_PATH
- hardcode_runpath_var=yes
- ld_shlibs=yes
- fi
- ;;
-
- sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
- no_undefined_flag='${wl}-z,text'
- archive_cmds_need_lc=no
- hardcode_shlibpath_var=no
- runpath_var='LD_RUN_PATH'
-
- if test "$GCC" = yes; then
- archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- fi
- ;;
-
- sysv5* | sco3.2v5* | sco5v6*)
- # Note: We can NOT use -z defs as we might desire, because we do not
- # link with -lc, and that would cause any symbols used from libc to
- # always be unresolved, which means just about no library would
- # ever link correctly. If we're not using GNU ld we use -z text
- # though, which does catch some bad symbols but isn't as heavy-handed
- # as -z defs.
- no_undefined_flag='${wl}-z,text'
- allow_undefined_flag='${wl}-z,nodefs'
- archive_cmds_need_lc=no
- hardcode_shlibpath_var=no
- hardcode_libdir_flag_spec='${wl}-R,$libdir'
- hardcode_libdir_separator=':'
- link_all_deplibs=yes
- export_dynamic_flag_spec='${wl}-Bexport'
- runpath_var='LD_RUN_PATH'
-
- if test "$GCC" = yes; then
- archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- fi
- ;;
-
- uts4*)
- archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_shlibpath_var=no
- ;;
-
- *)
- ld_shlibs=no
- ;;
- esac
-
- if test x$host_vendor = xsni; then
- case $host in
- sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
- export_dynamic_flag_spec='${wl}-Blargedynsym'
- ;;
- esac
- fi
- fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5
-$as_echo "$ld_shlibs" >&6; }
-test "$ld_shlibs" = no && can_build_shared=no
-
-with_gnu_ld=$with_gnu_ld
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$archive_cmds_need_lc" in
-x|xyes)
- # Assume -lc should be added
- archive_cmds_need_lc=yes
-
- if test "$enable_shared" = yes && test "$GCC" = yes; then
- case $archive_cmds in
- *'~'*)
- # FIXME: we may have to deal with multi-command sequences.
- ;;
- '$CC '*)
- # Test whether the compiler implicitly links with -lc since on some
- # systems, -lgcc has to come before -lc. If gcc already passes -lc
- # to ld, don't add -lc before -lgcc.
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
-$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
- $RM conftest*
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } 2>conftest.err; then
- soname=conftest
- lib=conftest
- libobjs=conftest.$ac_objext
- deplibs=
- wl=$lt_prog_compiler_wl
- pic_flag=$lt_prog_compiler_pic
- compiler_flags=-v
- linker_flags=-v
- verstring=
- output_objdir=.
- libname=conftest
- lt_save_allow_undefined_flag=$allow_undefined_flag
- allow_undefined_flag=
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
- (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
- then
- archive_cmds_need_lc=no
- else
- archive_cmds_need_lc=yes
- fi
- allow_undefined_flag=$lt_save_allow_undefined_flag
- else
- cat conftest.err 1>&5
- fi
- $RM conftest*
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $archive_cmds_need_lc" >&5
-$as_echo "$archive_cmds_need_lc" >&6; }
- ;;
- esac
- fi
- ;;
-esac
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
-$as_echo_n "checking dynamic linker characteristics... " >&6; }
-
-if test "$GCC" = yes; then
- case $host_os in
- darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
- *) lt_awk_arg="/^libraries:/" ;;
- esac
- lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"`
- if $ECHO "$lt_search_path_spec" | $GREP ';' >/dev/null ; then
- # if the path contains ";" then we assume it to be the separator
- # otherwise default to the standard path separator (i.e. ":") - it is
- # assumed that no part of a normal pathname contains ";" but that should
- # okay in the real world where ";" in dirpaths is itself problematic.
- lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e 's/;/ /g'`
- else
- lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
- fi
- # Ok, now we have the path, separated by spaces, we can step through it
- # and add multilib dir if necessary.
- lt_tmp_lt_search_path_spec=
- lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
- for lt_sys_path in $lt_search_path_spec; do
- if test -d "$lt_sys_path/$lt_multi_os_dir"; then
- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
- else
- test -d "$lt_sys_path" && \
- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
- fi
- done
- lt_search_path_spec=`$ECHO $lt_tmp_lt_search_path_spec | awk '
-BEGIN {RS=" "; FS="/|\n";} {
- lt_foo="";
- lt_count=0;
- for (lt_i = NF; lt_i > 0; lt_i--) {
- if ($lt_i != "" && $lt_i != ".") {
- if ($lt_i == "..") {
- lt_count++;
- } else {
- if (lt_count == 0) {
- lt_foo="/" $lt_i lt_foo;
- } else {
- lt_count--;
- }
- }
- }
- }
- if (lt_foo != "") { lt_freq[lt_foo]++; }
- if (lt_freq[lt_foo] == 1) { print lt_foo; }
-}'`
- sys_lib_search_path_spec=`$ECHO $lt_search_path_spec`
-else
- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
-fi
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
- shlibpath_var=LIBPATH
-
- # AIX 3 has no versioning support, so we append a major version to the name.
- soname_spec='${libname}${release}${shared_ext}$major'
- ;;
-
-aix[4-9]*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- hardcode_into_libs=yes
- if test "$host_cpu" = ia64; then
- # AIX 5 supports IA64
- library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- else
- # With GCC up to 2.95.x, collect2 would create an import file
- # for dependence libraries. The import file would start with
- # the line `#! .'. This would cause the generated library to
- # depend on `.', always an invalid library. This was fixed in
- # development snapshots of GCC prior to 3.0.
- case $host_os in
- aix4 | aix4.[01] | aix4.[01].*)
- if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
- echo ' yes '
- echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
- :
- else
- can_build_shared=no
- fi
- ;;
- esac
- # AIX (on Power*) has no versioning support, so currently we can not hardcode correct
- # soname into executable. Probably we can add versioning support to
- # collect2, so additional links can be useful in future.
- if test "$aix_use_runtimelinking" = yes; then
- # If using run time linking (on AIX 4.2 or later) use lib<name>.so
- # instead of lib<name>.a to let people know that these are not
- # typical AIX shared libraries.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- else
- # We preserve .a as extension for shared libraries through AIX4.2
- # and later when we are not doing run time linking.
- library_names_spec='${libname}${release}.a $libname.a'
- soname_spec='${libname}${release}${shared_ext}$major'
- fi
- shlibpath_var=LIBPATH
- fi
- ;;
-
-amigaos*)
- case $host_cpu in
- powerpc)
- # Since July 2007 AmigaOS4 officially supports .so libraries.
- # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- ;;
- m68k)
- library_names_spec='$libname.ixlibrary $libname.a'
- # Create ${libname}_ixlibrary.a entries in /sys/libs.
- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
- ;;
- esac
- ;;
-
-beos*)
- library_names_spec='${libname}${shared_ext}'
- dynamic_linker="$host_os ld.so"
- shlibpath_var=LIBRARY_PATH
- ;;
-
-bsdi[45]*)
- version_type=linux
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
- sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
- # the default ld.so.conf also contains /usr/contrib/lib and
- # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
- # libtool to hard-code these into programs
- ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
- version_type=windows
- shrext_cmds=".dll"
- need_version=no
- need_lib_prefix=no
-
- case $GCC,$host_os in
- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
- dldir=$destdir/`dirname \$dlpath`~
- test -d \$dldir || mkdir -p \$dldir~
- $install_prog $dir/$dlname \$dldir/$dlname~
- chmod a+x \$dldir/$dlname~
- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
- fi'
- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
- dlpath=$dir/\$dldll~
- $RM \$dlpath'
- shlibpath_overrides_runpath=yes
-
- case $host_os in
- cygwin*)
- # Cygwin DLLs use 'cyg' prefix rather than 'lib'
- soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib"
- ;;
- mingw* | cegcc*)
- # MinGW DLLs use traditional 'lib' prefix
- soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"`
- if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
- # It is most probably a Windows format PATH printed by
- # mingw gcc, but we are running on Cygwin. Gcc prints its search
- # path with ; separators, and with drive letters. We can handle the
- # drive letters (cygwin fileutils understands them), so leave them,
- # especially as we might pass files found there to a mingw objdump,
- # which wouldn't understand a cygwinified path. Ahh.
- sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
- else
- sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
- fi
- ;;
- pw32*)
- # pw32 DLLs use 'pw' prefix rather than 'lib'
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
- ;;
-
- *)
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
- ;;
- esac
- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-
-darwin* | rhapsody*)
- dynamic_linker="$host_os dyld"
- version_type=darwin
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
- soname_spec='${libname}${release}${major}$shared_ext'
- shlibpath_overrides_runpath=yes
- shlibpath_var=DYLD_LIBRARY_PATH
- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-
- sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
- sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
- ;;
-
-dgux*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- ;;
-
-freebsd1*)
- dynamic_linker=no
- ;;
-
-freebsd* | dragonfly*)
- # DragonFly does not have aout. When/if they implement a new
- # versioning mechanism, adjust this.
- if test -x /usr/bin/objformat; then
- objformat=`/usr/bin/objformat`
- else
- case $host_os in
- freebsd[123]*) objformat=aout ;;
- *) objformat=elf ;;
- esac
- fi
- version_type=freebsd-$objformat
- case $version_type in
- freebsd-elf*)
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
- need_version=no
- need_lib_prefix=no
- ;;
- freebsd-*)
- library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
- need_version=yes
- ;;
- esac
- shlibpath_var=LD_LIBRARY_PATH
- case $host_os in
- freebsd2*)
- shlibpath_overrides_runpath=yes
- ;;
- freebsd3.[01]* | freebsdelf3.[01]*)
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
- freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
- freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
- *) # from 4.6 on, and DragonFly
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
- esac
- ;;
-
-gnu*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- hardcode_into_libs=yes
- ;;
-
-hpux9* | hpux10* | hpux11*)
- # Give a soname corresponding to the major version so that dld.sl refuses to
- # link against other versions.
- version_type=sunos
- need_lib_prefix=no
- need_version=no
- case $host_cpu in
- ia64*)
- shrext_cmds='.so'
- hardcode_into_libs=yes
- dynamic_linker="$host_os dld.so"
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- if test "X$HPUX_IA64_MODE" = X32; then
- sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
- else
- sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
- fi
- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
- ;;
- hppa*64*)
- shrext_cmds='.sl'
- hardcode_into_libs=yes
- dynamic_linker="$host_os dld.sl"
- shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
- shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
- ;;
- *)
- shrext_cmds='.sl'
- dynamic_linker="$host_os dld.sl"
- shlibpath_var=SHLIB_PATH
- shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- ;;
- esac
- # HP-UX runs *really* slowly unless shared libraries are mode 555.
- postinstall_cmds='chmod 555 $lib'
- ;;
-
-interix[3-9]*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
-
-irix5* | irix6* | nonstopux*)
- case $host_os in
- nonstopux*) version_type=nonstopux ;;
- *)
- if test "$lt_cv_prog_gnu_ld" = yes; then
- version_type=linux
- else
- version_type=irix
- fi ;;
- esac
- need_lib_prefix=no
- need_version=no
- soname_spec='${libname}${release}${shared_ext}$major'
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
- case $host_os in
- irix5* | nonstopux*)
- libsuff= shlibsuff=
- ;;
- *)
- case $LD in # libtool.m4 will add one of these switches to LD
- *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
- libsuff= shlibsuff= libmagic=32-bit;;
- *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
- libsuff=32 shlibsuff=N32 libmagic=N32;;
- *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
- libsuff=64 shlibsuff=64 libmagic=64-bit;;
- *) libsuff= shlibsuff= libmagic=never-match;;
- esac
- ;;
- esac
- shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
- shlibpath_overrides_runpath=no
- sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
- sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
- hardcode_into_libs=yes
- ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
- dynamic_linker=no
- ;;
-
-# This must be Linux ELF.
-linux* | k*bsd*-gnu)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- # Some binutils ld are patched to set DT_RUNPATH
- save_LDFLAGS=$LDFLAGS
- save_libdir=$libdir
- eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \
- LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\""
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
- shlibpath_overrides_runpath=yes
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
- LDFLAGS=$save_LDFLAGS
- libdir=$save_libdir
-
- # This implies no fast_install, which is unacceptable.
- # Some rework will be needed to allow for fast_install
- # before this can be enabled.
- hardcode_into_libs=yes
-
- # Append ld.so.conf contents to the search path
- if test -f /etc/ld.so.conf; then
- lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '`
- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
- fi
-
- # We used to test for /lib/ld.so.1 and disable shared libraries on
- # powerpc, because MkLinux only supported shared libraries with the
- # GNU dynamic linker. Since this was broken with cross compilers,
- # most powerpc-linux boxes support dynamic linking these days and
- # people can always --disable-shared, the test was removed, and we
- # assume the GNU/Linux dynamic linker is in use.
- dynamic_linker='GNU/Linux ld.so'
- ;;
-
-netbsd*)
- version_type=sunos
- need_lib_prefix=no
- need_version=no
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
- dynamic_linker='NetBSD (a.out) ld.so'
- else
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- dynamic_linker='NetBSD ld.elf_so'
- fi
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
-
-newsos6)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- ;;
-
-*nto* | *qnx*)
- version_type=qnx
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- dynamic_linker='ldqnx.so'
- ;;
-
-openbsd*)
- version_type=sunos
- sys_lib_dlsearch_path_spec="/usr/lib"
- need_lib_prefix=no
- # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
- case $host_os in
- openbsd3.3 | openbsd3.3.*) need_version=yes ;;
- *) need_version=no ;;
- esac
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- case $host_os in
- openbsd2.[89] | openbsd2.[89].*)
- shlibpath_overrides_runpath=no
- ;;
- *)
- shlibpath_overrides_runpath=yes
- ;;
- esac
- else
- shlibpath_overrides_runpath=yes
- fi
- ;;
-
-os2*)
- libname_spec='$name'
- shrext_cmds=".dll"
- need_lib_prefix=no
- library_names_spec='$libname${shared_ext} $libname.a'
- dynamic_linker='OS/2 ld.exe'
- shlibpath_var=LIBPATH
- ;;
-
-osf3* | osf4* | osf5*)
- version_type=osf
- need_lib_prefix=no
- need_version=no
- soname_spec='${libname}${release}${shared_ext}$major'
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
- sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
- ;;
-
-rdos*)
- dynamic_linker=no
- ;;
-
-solaris*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- # ldd complains unless libraries are executable
- postinstall_cmds='chmod +x $lib'
- ;;
-
-sunos4*)
- version_type=sunos
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- if test "$with_gnu_ld" = yes; then
- need_lib_prefix=no
- fi
- need_version=yes
- ;;
-
-sysv4 | sysv4.3*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- case $host_vendor in
- sni)
- shlibpath_overrides_runpath=no
- need_lib_prefix=no
- runpath_var=LD_RUN_PATH
- ;;
- siemens)
- need_lib_prefix=no
- ;;
- motorola)
- need_lib_prefix=no
- need_version=no
- shlibpath_overrides_runpath=no
- sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
- ;;
- esac
- ;;
-
-sysv4*MP*)
- if test -d /usr/nec ;then
- version_type=linux
- library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
- soname_spec='$libname${shared_ext}.$major'
- shlibpath_var=LD_LIBRARY_PATH
- fi
- ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
- version_type=freebsd-elf
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- if test "$with_gnu_ld" = yes; then
- sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
- else
- sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
- case $host_os in
- sco3.2v5*)
- sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
- ;;
- esac
- fi
- sys_lib_dlsearch_path_spec='/usr/lib'
- ;;
-
-tpf*)
- # TPF is a cross-target only. Preferred cross-host = GNU/Linux.
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
-
-uts4*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- ;;
-
-*)
- dynamic_linker=no
- ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
-$as_echo "$dynamic_linker" >&6; }
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
- variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
- sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
- sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
-$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
-hardcode_action=
-if test -n "$hardcode_libdir_flag_spec" ||
- test -n "$runpath_var" ||
- test "X$hardcode_automatic" = "Xyes" ; then
-
- # We can hardcode non-existent directories.
- if test "$hardcode_direct" != no &&
- # If the only mechanism to avoid hardcoding is shlibpath_var, we
- # have to relink, otherwise we might link with an installed library
- # when we should be linking with a yet-to-be-installed one
- ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no &&
- test "$hardcode_minus_L" != no; then
- # Linking always hardcodes the temporary library directory.
- hardcode_action=relink
- else
- # We can link without hardcoding, and we can hardcode nonexisting dirs.
- hardcode_action=immediate
- fi
-else
- # We cannot hardcode anything, or else we can only hardcode existing
- # directories.
- hardcode_action=unsupported
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5
-$as_echo "$hardcode_action" >&6; }
-
-if test "$hardcode_action" = relink ||
- test "$inherit_rpath" = yes; then
- # Fast installation is not supported
- enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
- test "$enable_shared" = no; then
- # Fast installation is not necessary
- enable_fast_install=needless
-fi
-
-
-
-
-
-
- if test "x$enable_dlopen" != xyes; then
- enable_dlopen=unknown
- enable_dlopen_self=unknown
- enable_dlopen_self_static=unknown
-else
- lt_cv_dlopen=no
- lt_cv_dlopen_libs=
-
- case $host_os in
- beos*)
- lt_cv_dlopen="load_add_on"
- lt_cv_dlopen_libs=
- lt_cv_dlopen_self=yes
- ;;
-
- mingw* | pw32* | cegcc*)
- lt_cv_dlopen="LoadLibrary"
- lt_cv_dlopen_libs=
- ;;
-
- cygwin*)
- lt_cv_dlopen="dlopen"
- lt_cv_dlopen_libs=
- ;;
-
- darwin*)
- # if libdl is installed we need to link against it
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
-$as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldl $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- ac_cv_lib_dl_dlopen=yes
-else
- ac_cv_lib_dl_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
-$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
- lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
-else
-
- lt_cv_dlopen="dyld"
- lt_cv_dlopen_libs=
- lt_cv_dlopen_self=yes
-
-fi
-
- ;;
-
- *)
- ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
-if test "x$ac_cv_func_shl_load" = xyes; then :
- lt_cv_dlopen="shl_load"
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
-$as_echo_n "checking for shl_load in -ldld... " >&6; }
-if ${ac_cv_lib_dld_shl_load+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldld $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char shl_load ();
-int
-main ()
-{
-return shl_load ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- ac_cv_lib_dld_shl_load=yes
-else
- ac_cv_lib_dld_shl_load=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
-$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
-if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
- lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"
-else
- ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen"
-if test "x$ac_cv_func_dlopen" = xyes; then :
- lt_cv_dlopen="dlopen"
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
-$as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldl $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- ac_cv_lib_dl_dlopen=yes
-else
- ac_cv_lib_dl_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
-$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
- lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
-$as_echo_n "checking for dlopen in -lsvld... " >&6; }
-if ${ac_cv_lib_svld_dlopen+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_check_lib_save_LIBS=$LIBS
-LIBS="-lsvld $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- ac_cv_lib_svld_dlopen=yes
-else
- ac_cv_lib_svld_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
-$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
-if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
- lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
-$as_echo_n "checking for dld_link in -ldld... " >&6; }
-if ${ac_cv_lib_dld_dld_link+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldld $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-/* Override any GCC internal prototype to avoid an error.
- Use char because int might match the return type of a GCC
- builtin and then its argument prototype would still apply. */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dld_link ();
-int
-main ()
-{
-return dld_link ();
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
- ac_cv_lib_dld_dld_link=yes
-else
- ac_cv_lib_dld_dld_link=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
-$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
-if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
- lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
- ;;
- esac
-
- if test "x$lt_cv_dlopen" != xno; then
- enable_dlopen=yes
- else
- enable_dlopen=no
- fi
-
- case $lt_cv_dlopen in
- dlopen)
- save_CPPFLAGS="$CPPFLAGS"
- test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
-
- save_LDFLAGS="$LDFLAGS"
- wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
-
- save_LIBS="$LIBS"
- LIBS="$lt_cv_dlopen_libs $LIBS"
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5
-$as_echo_n "checking whether a program can dlopen itself... " >&6; }
-if ${lt_cv_dlopen_self+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test "$cross_compiling" = yes; then :
- lt_cv_dlopen_self=cross
-else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
-#line 11212 "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-# define LT_DLGLOBAL RTLD_GLOBAL
-#else
-# ifdef DL_GLOBAL
-# define LT_DLGLOBAL DL_GLOBAL
-# else
-# define LT_DLGLOBAL 0
-# endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
- find out it does not work in some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-# ifdef RTLD_LAZY
-# define LT_DLLAZY_OR_NOW RTLD_LAZY
-# else
-# ifdef DL_LAZY
-# define LT_DLLAZY_OR_NOW DL_LAZY
-# else
-# ifdef RTLD_NOW
-# define LT_DLLAZY_OR_NOW RTLD_NOW
-# else
-# ifdef DL_NOW
-# define LT_DLLAZY_OR_NOW DL_NOW
-# else
-# define LT_DLLAZY_OR_NOW 0
-# endif
-# endif
-# endif
-# endif
-#endif
-
-void fnord() { int i=42;}
-int main ()
-{
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
- int status = $lt_dlunknown;
-
- if (self)
- {
- if (dlsym (self,"fnord")) status = $lt_dlno_uscore;
- else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore;
- /* dlclose (self); */
- }
- else
- puts (dlerror ());
-
- return status;
-}
-_LT_EOF
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
- (eval $ac_link) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
- (./conftest; exit; ) >&5 2>/dev/null
- lt_status=$?
- case x$lt_status in
- x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;;
- x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;;
- x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;;
- esac
- else :
- # compilation failed
- lt_cv_dlopen_self=no
- fi
-fi
-rm -fr conftest*
-
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5
-$as_echo "$lt_cv_dlopen_self" >&6; }
-
- if test "x$lt_cv_dlopen_self" = xyes; then
- wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5
-$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; }
-if ${lt_cv_dlopen_self_static+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test "$cross_compiling" = yes; then :
- lt_cv_dlopen_self_static=cross
-else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
-#line 11308 "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-# define LT_DLGLOBAL RTLD_GLOBAL
-#else
-# ifdef DL_GLOBAL
-# define LT_DLGLOBAL DL_GLOBAL
-# else
-# define LT_DLGLOBAL 0
-# endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
- find out it does not work in some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-# ifdef RTLD_LAZY
-# define LT_DLLAZY_OR_NOW RTLD_LAZY
-# else
-# ifdef DL_LAZY
-# define LT_DLLAZY_OR_NOW DL_LAZY
-# else
-# ifdef RTLD_NOW
-# define LT_DLLAZY_OR_NOW RTLD_NOW
-# else
-# ifdef DL_NOW
-# define LT_DLLAZY_OR_NOW DL_NOW
-# else
-# define LT_DLLAZY_OR_NOW 0
-# endif
-# endif
-# endif
-# endif
-#endif
-
-void fnord() { int i=42;}
-int main ()
-{
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
- int status = $lt_dlunknown;
-
- if (self)
- {
- if (dlsym (self,"fnord")) status = $lt_dlno_uscore;
- else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore;
- /* dlclose (self); */
- }
- else
- puts (dlerror ());
-
- return status;
-}
-_LT_EOF
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
- (eval $ac_link) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
- (./conftest; exit; ) >&5 2>/dev/null
- lt_status=$?
- case x$lt_status in
- x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;;
- x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;;
- x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;;
- esac
- else :
- # compilation failed
- lt_cv_dlopen_self_static=no
- fi
-fi
-rm -fr conftest*
-
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5
-$as_echo "$lt_cv_dlopen_self_static" >&6; }
- fi
-
- CPPFLAGS="$save_CPPFLAGS"
- LDFLAGS="$save_LDFLAGS"
- LIBS="$save_LIBS"
- ;;
- esac
-
- case $lt_cv_dlopen_self in
- yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
- *) enable_dlopen_self=unknown ;;
- esac
-
- case $lt_cv_dlopen_self_static in
- yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
- *) enable_dlopen_self_static=unknown ;;
- esac
-fi
-
-striplib=
-old_striplib=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5
-$as_echo_n "checking whether stripping libraries is possible... " >&6; }
-if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
- test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
- test -z "$striplib" && striplib="$STRIP --strip-unneeded"
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-# FIXME - insert some real tests, host_os isn't really good enough
- case $host_os in
- darwin*)
- if test -n "$STRIP" ; then
- striplib="$STRIP -x"
- old_striplib="$STRIP -S"
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
- else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
- fi
- ;;
- *)
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
- ;;
- esac
-fi
-
- # Report which library types will actually be built
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
-$as_echo_n "checking if libtool supports shared libraries... " >&6; }
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
-$as_echo "$can_build_shared" >&6; }
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
-$as_echo_n "checking whether to build shared libraries... " >&6; }
- test "$can_build_shared" = "no" && enable_shared=no
-
- # On AIX, shared libraries and static libraries use the same namespace, and
- # are all built from PIC.
- case $host_os in
- aix3*)
- test "$enable_shared" = yes && enable_static=no
- if test -n "$RANLIB"; then
- archive_cmds="$archive_cmds~\$RANLIB \$lib"
- postinstall_cmds='$RANLIB $lib'
- fi
- ;;
-
- aix[4-9]*)
- if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
- test "$enable_shared" = yes && enable_static=no
- fi
- ;;
- esac
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
-$as_echo "$enable_shared" >&6; }
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
-$as_echo_n "checking whether to build static libraries... " >&6; }
- # Make sure either enable_shared or enable_static is yes.
- test "$enable_shared" = yes || enable_static=yes
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
-$as_echo "$enable_static" >&6; }
-
-
-
-
-fi
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-CC="$lt_save_CC"
-
-
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-archive_cmds_need_lc_CXX=no
-allow_undefined_flag_CXX=
-always_export_symbols_CXX=no
-archive_expsym_cmds_CXX=
-compiler_needs_object_CXX=no
-export_dynamic_flag_spec_CXX=
-hardcode_direct_CXX=no
-hardcode_direct_absolute_CXX=no
-hardcode_libdir_flag_spec_CXX=
-hardcode_libdir_flag_spec_ld_CXX=
-hardcode_libdir_separator_CXX=
-hardcode_minus_L_CXX=no
-hardcode_shlibpath_var_CXX=unsupported
-hardcode_automatic_CXX=no
-inherit_rpath_CXX=no
-module_cmds_CXX=
-module_expsym_cmds_CXX=
-link_all_deplibs_CXX=unknown
-old_archive_cmds_CXX=$old_archive_cmds
-no_undefined_flag_CXX=
-whole_archive_flag_spec_CXX=
-enable_shared_with_static_runtimes_CXX=no
-
-# Source file extension for C++ test sources.
-ac_ext=cpp
-
-# Object file extension for compiled C++ test sources.
-objext=o
-objext_CXX=$objext
-
-# No sense in running all these tests if we already determined that
-# the CXX compiler isn't working. Some variables (like enable_shared)
-# are currently assumed to apply to all compilers on this platform,
-# and will be corrupted by setting them based on a non-working compiler.
-if test "$_lt_caught_CXX_error" != yes; then
- # Code to be used in simple compile tests
- lt_simple_compile_test_code="int some_variable = 0;"
-
- # Code to be used in simple link tests
- lt_simple_link_test_code='int main(int, char *[]) { return(0); }'
-
- # ltmain only uses $CC for tagged configurations so make sure $CC is set.
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-
- # save warnings/boilerplate of simple test code
- ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-
- ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-
-
- # Allow CC to be a program name with arguments.
- lt_save_CC=$CC
- lt_save_LD=$LD
- lt_save_GCC=$GCC
- GCC=$GXX
- lt_save_with_gnu_ld=$with_gnu_ld
- lt_save_path_LD=$lt_cv_path_LD
- if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
- lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
- else
- $as_unset lt_cv_prog_gnu_ld
- fi
- if test -n "${lt_cv_path_LDCXX+set}"; then
- lt_cv_path_LD=$lt_cv_path_LDCXX
- else
- $as_unset lt_cv_path_LD
- fi
- test -z "${LDCXX+set}" || LD=$LDCXX
- CC=${CXX-"c++"}
- compiler=$CC
- compiler_CXX=$CC
- for cc_temp in $compiler""; do
- case $cc_temp in
- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
- \-*) ;;
- *) break;;
- esac
-done
-cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"`
-
-
- if test -n "$compiler"; then
- # We don't want -fno-exception when compiling C++ code, so set the
- # no_builtin_flag separately
- if test "$GXX" = yes; then
- lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin'
- else
- lt_prog_compiler_no_builtin_flag_CXX=
- fi
-
- if test "$GXX" = yes; then
- # Set up default GNU C++ configuration
-
-
-
-# Check whether --with-gnu-ld was given.
-if test "${with_gnu_ld+set}" = set; then :
- withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
-else
- with_gnu_ld=no
-fi
-
-ac_prog=ld
-if test "$GCC" = yes; then
- # Check if gcc -print-prog-name=ld gives a path.
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
-$as_echo_n "checking for ld used by $CC... " >&6; }
- case $host in
- *-*-mingw*)
- # gcc leaves a trailing carriage return which upsets mingw
- ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
- *)
- ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
- esac
- case $ac_prog in
- # Accept absolute paths.
- [\\/]* | ?:[\\/]*)
- re_direlt='/[^/][^/]*/\.\./'
- # Canonicalize the pathname of ld
- ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
- while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
- ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
- done
- test -z "$LD" && LD="$ac_prog"
- ;;
- "")
- # If it fails, then pretend we aren't using GCC.
- ac_prog=ld
- ;;
- *)
- # If it is relative, then search for the first ld in PATH.
- with_gnu_ld=unknown
- ;;
- esac
-elif test "$with_gnu_ld" = yes; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
-$as_echo_n "checking for GNU ld... " >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
-$as_echo_n "checking for non-GNU ld... " >&6; }
-fi
-if ${lt_cv_path_LD+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- if test -z "$LD"; then
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in $PATH; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
- lt_cv_path_LD="$ac_dir/$ac_prog"
- # Check to see if the program is GNU ld. I'd rather use --version,
- # but apparently some variants of GNU ld only accept -v.
- # Break only if it was the GNU/non-GNU ld that we prefer.
- case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
- *GNU* | *'with BFD'*)
- test "$with_gnu_ld" != no && break
- ;;
- *)
- test "$with_gnu_ld" != yes && break
- ;;
- esac
- fi
- done
- IFS="$lt_save_ifs"
-else
- lt_cv_path_LD="$LD" # Let the user override the test with a path.
-fi
-fi
-
-LD="$lt_cv_path_LD"
-if test -n "$LD"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
-$as_echo "$LD" >&6; }
-else
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
-$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
-if ${lt_cv_prog_gnu_ld+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- # I'd rather use --version here, but apparently some GNU lds only accept -v.
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
- lt_cv_prog_gnu_ld=yes
- ;;
-*)
- lt_cv_prog_gnu_ld=no
- ;;
-esac
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
-$as_echo "$lt_cv_prog_gnu_ld" >&6; }
-with_gnu_ld=$lt_cv_prog_gnu_ld
-
-
-
-
-
-
-
- # Check if GNU C++ uses GNU ld as the underlying linker, since the
- # archiving commands below assume that GNU ld is being used.
- if test "$with_gnu_ld" = yes; then
- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-
- hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir'
- export_dynamic_flag_spec_CXX='${wl}--export-dynamic'
-
- # If archive_cmds runs LD, not CC, wlarc should be empty
- # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
- # investigate it a little bit more. (MM)
- wlarc='${wl}'
-
-      # ancient GNU ld didn't support --whole-archive et al.
- if eval "`$CC -print-prog-name=ld` --help 2>&1" |
- $GREP 'no-whole-archive' > /dev/null; then
- whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
- else
- whole_archive_flag_spec_CXX=
- fi
- else
- with_gnu_ld=no
- wlarc=
-
- # A generic and very simple default shared library creation
- # command for GNU C++ for the case where it uses the native
-          # linker, instead of GNU ld.  If possible, this setting should
-          # be overridden to take advantage of the native linker features on
- # the platform it is being used on.
- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
- fi
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
-
- else
- GXX=no
- with_gnu_ld=no
- wlarc=
- fi
-
- # PORTME: fill in a description of your system's C++ link characteristics
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
- ld_shlibs_CXX=yes
- case $host_os in
- aix3*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- aix[4-9]*)
- if test "$host_cpu" = ia64; then
- # On IA64, the linker does run time linking by default, so we don't
- # have to do anything special.
- aix_use_runtimelinking=no
- exp_sym_flag='-Bexport'
- no_entry_flag=""
- else
- aix_use_runtimelinking=no
-
- # Test if we are trying to use run time linking or normal
- # AIX style linking. If -brtl is somewhere in LDFLAGS, we
- # need to do runtime linking.
- case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
- for ld_flag in $LDFLAGS; do
- case $ld_flag in
- *-brtl*)
- aix_use_runtimelinking=yes
- break
- ;;
- esac
- done
- ;;
- esac
-
- exp_sym_flag='-bexport'
- no_entry_flag='-bnoentry'
- fi
-
- # When large executables or shared objects are built, AIX ld can
- # have problems creating the table of contents. If linking a library
- # or program results in "error TOC overflow" add -mminimal-toc to
- # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
- # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
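    # For illustration only -- one way to apply the workaround described
    # above when configuring with g++ on AIX:
    #   CXXFLAGS="$CXXFLAGS -mminimal-toc" ./configure
    # and, if the TOC still overflows:
    #   LDFLAGS="$LDFLAGS -Wl,-bbigtoc" ./configure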
-
- archive_cmds_CXX=''
- hardcode_direct_CXX=yes
- hardcode_direct_absolute_CXX=yes
- hardcode_libdir_separator_CXX=':'
- link_all_deplibs_CXX=yes
- file_list_spec_CXX='${wl}-f,'
-
- if test "$GXX" = yes; then
- case $host_os in aix4.[012]|aix4.[012].*)
- # We only want to do this on AIX 4.2 and lower, the check
- # below for broken collect2 doesn't work under 4.3+
- collect2name=`${CC} -print-prog-name=collect2`
- if test -f "$collect2name" &&
- strings "$collect2name" | $GREP resolve_lib_name >/dev/null
- then
- # We have reworked collect2
- :
- else
- # We have old collect2
- hardcode_direct_CXX=unsupported
- # It fails to find uninstalled libraries when the uninstalled
- # path is not listed in the libpath. Setting hardcode_minus_L
- # to unsupported forces relinking
- hardcode_minus_L_CXX=yes
- hardcode_libdir_flag_spec_CXX='-L$libdir'
- hardcode_libdir_separator_CXX=
- fi
- esac
- shared_flag='-shared'
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag="$shared_flag "'${wl}-G'
- fi
- else
- # not using gcc
- if test "$host_cpu" = ia64; then
- # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
- # chokes on -Wl,-G. The following line is correct:
- shared_flag='-G'
- else
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag='${wl}-G'
- else
- shared_flag='${wl}-bM:SRE'
- fi
- fi
- fi
-
- export_dynamic_flag_spec_CXX='${wl}-bexpall'
- # It seems that -bexpall does not export symbols beginning with
- # underscore (_), so it is better to generate a list of symbols to
- # export.
- always_export_symbols_CXX=yes
- if test "$aix_use_runtimelinking" = yes; then
- # Warning - without using the other runtime loading flags (-brtl),
- # -berok will link without error, but may produce a broken library.
- allow_undefined_flag_CXX='-berok'
- # Determine the default libpath from the value encoded in an empty
- # executable.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_link "$LINENO"; then :
-
-lt_aix_libpath_sed='
- /Import File Strings/,/^$/ {
- /^0/ {
- s/^0 *\(.*\)$/\1/
- p
- }
- }'
-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-# Check for a 64-bit object if we didn't find anything.
-if test -z "$aix_libpath"; then
- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
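  # In short: the empty program linked above is inspected with "dump -H";
  # the sed script keeps the entry numbered 0 in the "Import File Strings"
  # section, which holds the default library search path recorded by the
  # linker; a 64-bit object is retried with -HX64 if that yields nothing,
  # and /usr/lib:/lib is the final fallback.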
-
- hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
-
- archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
- else
- if test "$host_cpu" = ia64; then
- hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib'
- allow_undefined_flag_CXX="-z nodefs"
- archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_link "$LINENO"; then :
-
-lt_aix_libpath_sed='
- /Import File Strings/,/^$/ {
- /^0/ {
- s/^0 *\(.*\)$/\1/
- p
- }
- }'
-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-# Check for a 64-bit object if we didn't find anything.
-if test -z "$aix_libpath"; then
- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
- # -berok will link without error, but may produce a broken library.
- no_undefined_flag_CXX=' ${wl}-bernotok'
- allow_undefined_flag_CXX=' ${wl}-berok'
- # Exported symbols can be pulled into shared objects from archives
- whole_archive_flag_spec_CXX='$convenience'
- archive_cmds_need_lc_CXX=yes
- # This is similar to how AIX traditionally builds its shared
- # libraries.
- archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
- fi
- fi
- ;;
-
- beos*)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- allow_undefined_flag_CXX=unsupported
- # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
- # support --undefined. This deserves some investigation. FIXME
- archive_cmds_CXX='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- else
- ld_shlibs_CXX=no
- fi
- ;;
-
- chorus*)
- case $cc_basename in
- *)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- esac
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
- # as there is no search path for DLLs.
- hardcode_libdir_flag_spec_CXX='-L$libdir'
- allow_undefined_flag_CXX=unsupported
- always_export_symbols_CXX=no
- enable_shared_with_static_runtimes_CXX=yes
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- # If the export-symbols file already is a .def file (1st line
- # is EXPORTS), use it as is; otherwise, prepend...
- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
- cp $export_symbols $output_objdir/$soname.def;
- else
- echo EXPORTS > $output_objdir/$soname.def;
- cat $export_symbols >> $output_objdir/$soname.def;
- fi~
- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- else
- ld_shlibs_CXX=no
- fi
- ;;
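    # For illustration, the fallback branch above turns a plain symbol list
    # into a module-definition file; with hypothetical symbol names it would
    # read:
    #   EXPORTS
    #   my_exported_function
    #   another_exported_function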
- darwin* | rhapsody*)
-
-
- archive_cmds_need_lc_CXX=no
- hardcode_direct_CXX=no
- hardcode_automatic_CXX=yes
- hardcode_shlibpath_var_CXX=unsupported
- whole_archive_flag_spec_CXX=''
- link_all_deplibs_CXX=yes
- allow_undefined_flag_CXX="$_lt_dar_allow_undefined"
- case $cc_basename in
- ifort*) _lt_dar_can_shared=yes ;;
- *) _lt_dar_can_shared=$GCC ;;
- esac
- if test "$_lt_dar_can_shared" = "yes"; then
- output_verbose_link_cmd=echo
- archive_cmds_CXX="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
- module_cmds_CXX="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
- archive_expsym_cmds_CXX="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
- module_expsym_cmds_CXX="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
- if test "$lt_cv_apple_cc_single_mod" != "yes"; then
- archive_cmds_CXX="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
- archive_expsym_cmds_CXX="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
- fi
-
- else
- ld_shlibs_CXX=no
- fi
-
- ;;
-
- dgux*)
- case $cc_basename in
- ec++*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- ghcx*)
- # Green Hills C++ Compiler
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- esac
- ;;
-
- freebsd[12]*)
-    # C++ shared libraries were reported to be fairly broken before
-    # the switch to ELF
- ld_shlibs_CXX=no
- ;;
-
- freebsd-elf*)
- archive_cmds_need_lc_CXX=no
- ;;
-
- freebsd* | dragonfly*)
- # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
- # conventions
- ld_shlibs_CXX=yes
- ;;
-
- gnu*)
- ;;
-
- hpux9*)
- hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator_CXX=:
- export_dynamic_flag_spec_CXX='${wl}-E'
- hardcode_direct_CXX=yes
- hardcode_minus_L_CXX=yes # Not in the search PATH,
- # but as the default
- # location of the library.
-
- case $cc_basename in
- CC*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- aCC*)
- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- *)
- if test "$GXX" = yes; then
- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- fi
- ;;
- esac
- ;;
-
- hpux10*|hpux11*)
- if test $with_gnu_ld = no; then
- hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator_CXX=:
-
- case $host_cpu in
- hppa*64*|ia64*)
- ;;
- *)
- export_dynamic_flag_spec_CXX='${wl}-E'
- ;;
- esac
- fi
- case $host_cpu in
- hppa*64*|ia64*)
- hardcode_direct_CXX=no
- hardcode_shlibpath_var_CXX=no
- ;;
- *)
- hardcode_direct_CXX=yes
- hardcode_direct_absolute_CXX=yes
- hardcode_minus_L_CXX=yes # Not in the search PATH,
- # but as the default
- # location of the library.
- ;;
- esac
-
- case $cc_basename in
- CC*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- aCC*)
- case $host_cpu in
- hppa*64*)
- archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- ia64*)
- archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- *)
- archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- esac
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- *)
- if test "$GXX" = yes; then
- if test $with_gnu_ld = no; then
- case $host_cpu in
- hppa*64*)
- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- ia64*)
- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- *)
- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- esac
- fi
- else
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- fi
- ;;
- esac
- ;;
-
- interix[3-9]*)
- hardcode_direct_CXX=no
- hardcode_shlibpath_var_CXX=no
- hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir'
- export_dynamic_flag_spec_CXX='${wl}-E'
- # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
- # Instead, shared libraries are loaded at an image base (0x10000000 by
-    # default) and relocated if they conflict, which is a slow, very memory-
-    # consuming and fragmenting process.  To avoid this, we pick a random,
- # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
- # time. Moving up from 0x10000000 also allows more sbrk(2) space.
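    # The expr in the commands below decodes as: 1342177280 = 0x50000000,
    # 262144 bytes = 256 KiB, and ${RANDOM-$$} % 4096 / 2 ranges over
    # 0..2047, so the chosen image base falls on a 256 KiB boundary
    # between 0x50000000 and 0x6FFC0000, as described above.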
- archive_cmds_CXX='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- archive_expsym_cmds_CXX='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- ;;
- irix5* | irix6*)
- case $cc_basename in
- CC*)
- # SGI C++
- archive_cmds_CXX='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
-
- # Archives containing C++ object files must be created using
- # "CC -ar", where "CC" is the IRIX C++ compiler. This is
- # necessary to make sure instantiated templates are included
- # in the archive.
- old_archive_cmds_CXX='$CC -ar -WR,-u -o $oldlib $oldobjs'
- ;;
- *)
- if test "$GXX" = yes; then
- if test "$with_gnu_ld" = no; then
- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- else
- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` -o $lib'
- fi
- fi
- link_all_deplibs_CXX=yes
- ;;
- esac
- hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator_CXX=:
- inherit_rpath_CXX=yes
- ;;
-
- linux* | k*bsd*-gnu)
- case $cc_basename in
- KCC*)
- # Kuck and Associates, Inc. (KAI) C++ Compiler
-
- # KCC will only create a shared library if the output file
- # ends with ".so" (or ".sl" for HP-UX), so rename the library
- # to its proper name (with version) after linking.
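      # For example (hypothetical library name): with $lib = libfoo.so.1.2.3
      # and $shared_ext = .so, $templib in the commands below becomes
      # libfoo.so, KCC links that, and the trailing mv restores the fully
      # versioned name.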
- archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
- archive_expsym_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
-
- hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir'
- export_dynamic_flag_spec_CXX='${wl}--export-dynamic'
-
- # Archives containing C++ object files must be created using
- # "CC -Bstatic", where "CC" is the KAI C++ compiler.
- old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs'
- ;;
- icpc* | ecpc* )
- # Intel C++
- with_gnu_ld=yes
- # version 8.0 and above of icpc choke on multiply defined symbols
- # if we add $predep_objects and $postdep_objects, however 7.1 and
- # earlier do not add the objects themselves.
- case `$CC -V 2>&1` in
- *"Version 7."*)
- archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- ;;
- *) # Version 8.0 or newer
- tmp_idyn=
- case $host_cpu in
- ia64*) tmp_idyn=' -i_dynamic';;
- esac
- archive_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- ;;
- esac
- archive_cmds_need_lc_CXX=no
- hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir'
- export_dynamic_flag_spec_CXX='${wl}--export-dynamic'
- whole_archive_flag_spec_CXX='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
- ;;
- pgCC* | pgcpp*)
- # Portland Group C++ compiler
- case `$CC -V` in
- *pgCC\ [1-5]* | *pgcpp\ [1-5]*)
- prelink_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"'
- old_archive_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~
- $RANLIB $oldlib'
- archive_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
- archive_expsym_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
- ;;
- *) # Version 6 will use weak symbols
- archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
- archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
- ;;
- esac
-
- hardcode_libdir_flag_spec_CXX='${wl}--rpath ${wl}$libdir'
- export_dynamic_flag_spec_CXX='${wl}--export-dynamic'
- whole_archive_flag_spec_CXX='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- ;;
- cxx*)
- # Compaq C++
- archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
- archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
-
- runpath_var=LD_RUN_PATH
- hardcode_libdir_flag_spec_CXX='-rpath $libdir'
- hardcode_libdir_separator_CXX=:
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- xl*)
- # IBM XL 8.0 on PPC, with GNU ld
- hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir'
- export_dynamic_flag_spec_CXX='${wl}--export-dynamic'
- archive_cmds_CXX='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds_CXX='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
- $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
- fi
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C++ 5.9
- no_undefined_flag_CXX=' -zdefs'
- archive_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- archive_expsym_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
- hardcode_libdir_flag_spec_CXX='-R$libdir'
- whole_archive_flag_spec_CXX='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- compiler_needs_object_CXX=yes
-
- # Not sure whether something based on
- # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
- # would be better.
- output_verbose_link_cmd='echo'
-
- # Archives containing C++ object files must be created using
- # "CC -xar", where "CC" is the Sun C++ compiler. This is
- # necessary to make sure instantiated templates are included
- # in the archive.
- old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs'
- ;;
- esac
- ;;
- esac
- ;;
-
- lynxos*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
-
- m88k*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
-
- mvs*)
- case $cc_basename in
- cxx*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- esac
- ;;
-
- netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- archive_cmds_CXX='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
- wlarc=
- hardcode_libdir_flag_spec_CXX='-R$libdir'
- hardcode_direct_CXX=yes
- hardcode_shlibpath_var_CXX=no
- fi
-    # Work around some broken pre-1.5 toolchains
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
- ;;
-
- *nto* | *qnx*)
- ld_shlibs_CXX=yes
- ;;
-
- openbsd2*)
- # C++ shared libraries are fairly broken
- ld_shlibs_CXX=no
- ;;
-
- openbsd*)
- if test -f /usr/libexec/ld.so; then
- hardcode_direct_CXX=yes
- hardcode_shlibpath_var_CXX=no
- hardcode_direct_absolute_CXX=yes
- archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
- hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir'
- if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
- export_dynamic_flag_spec_CXX='${wl}-E'
- whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
- fi
- output_verbose_link_cmd=echo
- else
- ld_shlibs_CXX=no
- fi
- ;;
-
- osf3* | osf4* | osf5*)
- case $cc_basename in
- KCC*)
- # Kuck and Associates, Inc. (KAI) C++ Compiler
-
- # KCC will only create a shared library if the output file
- # ends with ".so" (or ".sl" for HP-UX), so rename the library
- # to its proper name (with version) after linking.
- archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
-
- hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir'
- hardcode_libdir_separator_CXX=:
-
- # Archives containing C++ object files must be created using
- # the KAI C++ compiler.
- case $host in
- osf3*) old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;;
- *) old_archive_cmds_CXX='$CC -o $oldlib $oldobjs' ;;
- esac
- ;;
- RCC*)
- # Rational C++ 2.4.1
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- cxx*)
- case $host in
- osf3*)
- allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*'
- archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && $ECHO "X${wl}-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir'
- ;;
- *)
- allow_undefined_flag_CXX=' -expect_unresolved \*'
- archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds_CXX='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
- echo "-hidden">> $lib.exp~
- $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~
- $RM $lib.exp'
- hardcode_libdir_flag_spec_CXX='-rpath $libdir'
- ;;
- esac
-
- hardcode_libdir_separator_CXX=:
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- *)
- if test "$GXX" = yes && test "$with_gnu_ld" = no; then
- allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*'
- case $host in
- osf3*)
- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- *)
- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- esac
-
- hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator_CXX=:
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
-
- else
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- fi
- ;;
- esac
- ;;
-
- psos*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
-
- sunos4*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.x
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- lcc*)
- # Lucid
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- esac
- ;;
-
- solaris*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- archive_cmds_need_lc_CXX=yes
- no_undefined_flag_CXX=' -zdefs'
- archive_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- hardcode_libdir_flag_spec_CXX='-R$libdir'
- hardcode_shlibpath_var_CXX=no
- case $host_os in
- solaris2.[0-5] | solaris2.[0-5].*) ;;
- *)
- # The compiler driver will combine and reorder linker options,
- # but understands `-z linker_flag'.
- # Supported since Solaris 2.6 (maybe 2.5.1?)
- whole_archive_flag_spec_CXX='-z allextract$convenience -z defaultextract'
- ;;
- esac
- link_all_deplibs_CXX=yes
-
- output_verbose_link_cmd='echo'
-
- # Archives containing C++ object files must be created using
- # "CC -xar", where "CC" is the Sun C++ compiler. This is
- # necessary to make sure instantiated templates are included
- # in the archive.
- old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs'
- ;;
- gcx*)
- # Green Hills C++ Compiler
- archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
-
- # The C++ compiler must be used to create the archive.
- old_archive_cmds_CXX='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
- ;;
- *)
- # GNU C++ compiler with Solaris linker
- if test "$GXX" = yes && test "$with_gnu_ld" = no; then
- no_undefined_flag_CXX=' ${wl}-z ${wl}defs'
- if $CC --version | $GREP -v '^2\.7' > /dev/null; then
- archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
- archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
- else
- # g++ 2.7 appears to require `-G' NOT `-shared' on this
- # platform.
- archive_cmds_CXX='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
- archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
- fi
-
- hardcode_libdir_flag_spec_CXX='${wl}-R $wl$libdir'
- case $host_os in
- solaris2.[0-5] | solaris2.[0-5].*) ;;
- *)
- whole_archive_flag_spec_CXX='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
- ;;
- esac
- fi
- ;;
- esac
- ;;
-
- sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
- no_undefined_flag_CXX='${wl}-z,text'
- archive_cmds_need_lc_CXX=no
- hardcode_shlibpath_var_CXX=no
- runpath_var='LD_RUN_PATH'
-
- case $cc_basename in
- CC*)
- archive_cmds_CXX='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds_CXX='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- archive_cmds_CXX='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds_CXX='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- ;;
-
- sysv5* | sco3.2v5* | sco5v6*)
- # Note: We can NOT use -z defs as we might desire, because we do not
- # link with -lc, and that would cause any symbols used from libc to
- # always be unresolved, which means just about no library would
- # ever link correctly. If we're not using GNU ld we use -z text
- # though, which does catch some bad symbols but isn't as heavy-handed
- # as -z defs.
- no_undefined_flag_CXX='${wl}-z,text'
- allow_undefined_flag_CXX='${wl}-z,nodefs'
- archive_cmds_need_lc_CXX=no
- hardcode_shlibpath_var_CXX=no
- hardcode_libdir_flag_spec_CXX='${wl}-R,$libdir'
- hardcode_libdir_separator_CXX=':'
- link_all_deplibs_CXX=yes
- export_dynamic_flag_spec_CXX='${wl}-Bexport'
- runpath_var='LD_RUN_PATH'
-
- case $cc_basename in
- CC*)
- archive_cmds_CXX='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds_CXX='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- archive_cmds_CXX='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds_CXX='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- ;;
-
- tandem*)
- case $cc_basename in
- NCC*)
- # NonStop-UX NCC 3.20
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- esac
- ;;
-
- vxworks*)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
-
- *)
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
- ;;
- esac
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5
-$as_echo "$ld_shlibs_CXX" >&6; }
- test "$ld_shlibs_CXX" = no && can_build_shared=no
-
- GCC_CXX="$GXX"
- LD_CXX="$LD"
-
- ## CAVEAT EMPTOR:
- ## There is no encapsulation within the following macros, do not change
- ## the running order or otherwise move them around unless you know exactly
- ## what you are doing...
- # Dependencies to place before and after the object being linked:
-predep_objects_CXX=
-postdep_objects_CXX=
-predeps_CXX=
-postdeps_CXX=
-compiler_lib_search_path_CXX=
-
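# The probe below compiles the trivial class in conftest, reruns the link in
# verbose mode ($output_verbose_link_cmd), and splits the reported -L/-l and
# object-file tokens around conftest.$objext: everything seen before it goes
# into predep_objects_CXX/compiler_lib_search_path_CXX, everything after it
# into postdep_objects_CXX/postdeps_CXX.  For a hypothetical verbose line
#   "crtbegin.o conftest.o -lstdc++ -lm crtend.o"
# that would yield predep_objects_CXX=crtbegin.o, postdeps_CXX="-lstdc++ -lm"
# and postdep_objects_CXX=crtend.o.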
-cat > conftest.$ac_ext <<_LT_EOF
-class Foo
-{
-public:
- Foo (void) { a = 0; }
-private:
- int a;
-};
-_LT_EOF
-
-if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }; then
- # Parse the compiler output and extract the necessary
- # objects, libraries and library flags.
-
- # Sentinel used to keep track of whether or not we are before
- # the conftest object file.
- pre_test_object_deps_done=no
-
- for p in `eval "$output_verbose_link_cmd"`; do
- case $p in
-
- -L* | -R* | -l*)
- # Some compilers place space between "-{L,R}" and the path.
- # Remove the space.
- if test $p = "-L" ||
- test $p = "-R"; then
- prev=$p
- continue
- else
- prev=
- fi
-
- if test "$pre_test_object_deps_done" = no; then
- case $p in
- -L* | -R*)
- # Internal compiler library paths should come after those
-	     # provided by the user.  The postdeps already come after the
-	     # user-supplied libs so there is no need to process them.
- if test -z "$compiler_lib_search_path_CXX"; then
- compiler_lib_search_path_CXX="${prev}${p}"
- else
- compiler_lib_search_path_CXX="${compiler_lib_search_path_CXX} ${prev}${p}"
- fi
- ;;
- # The "-l" case would never come before the object being
- # linked, so don't bother handling this case.
- esac
- else
- if test -z "$postdeps_CXX"; then
- postdeps_CXX="${prev}${p}"
- else
- postdeps_CXX="${postdeps_CXX} ${prev}${p}"
- fi
- fi
- ;;
-
- *.$objext)
- # This assumes that the test object file only shows up
- # once in the compiler output.
- if test "$p" = "conftest.$objext"; then
- pre_test_object_deps_done=yes
- continue
- fi
-
- if test "$pre_test_object_deps_done" = no; then
- if test -z "$predep_objects_CXX"; then
- predep_objects_CXX="$p"
- else
- predep_objects_CXX="$predep_objects_CXX $p"
- fi
- else
- if test -z "$postdep_objects_CXX"; then
- postdep_objects_CXX="$p"
- else
- postdep_objects_CXX="$postdep_objects_CXX $p"
- fi
- fi
- ;;
-
- *) ;; # Ignore the rest.
-
- esac
- done
-
- # Clean up.
- rm -f a.out a.exe
-else
- echo "libtool.m4: error: problem compiling CXX test program"
-fi
-
-$RM -f conftest.$objext
-
-# PORTME: override above test on systems where it is broken
-case $host_os in
-interix[3-9]*)
- # Interix 3.5 installs completely hosed .la files for C++, so rather than
- # hack all around it, let's just trust "g++" to DTRT.
- predep_objects_CXX=
- postdep_objects_CXX=
- postdeps_CXX=
- ;;
-
-linux*)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C++ 5.9
-
- # The more standards-conforming stlport4 library is
- # incompatible with the Cstd library. Avoid specifying
- # it if it's in CXXFLAGS. Ignore libCrun as
- # -library=stlport4 depends on it.
- case " $CXX $CXXFLAGS " in
- *" -library=stlport4 "*)
- solaris_use_stlport4=yes
- ;;
- esac
-
- if test "$solaris_use_stlport4" != yes; then
- postdeps_CXX='-library=Cstd -library=Crun'
- fi
- ;;
- esac
- ;;
-
-solaris*)
- case $cc_basename in
- CC*)
- # The more standards-conforming stlport4 library is
- # incompatible with the Cstd library. Avoid specifying
- # it if it's in CXXFLAGS. Ignore libCrun as
- # -library=stlport4 depends on it.
- case " $CXX $CXXFLAGS " in
- *" -library=stlport4 "*)
- solaris_use_stlport4=yes
- ;;
- esac
-
- # Adding this requires a known-good setup of shared libraries for
- # Sun compiler versions before 5.6, else PIC objects from an old
- # archive will be linked into the output, leading to subtle bugs.
- if test "$solaris_use_stlport4" != yes; then
- postdeps_CXX='-library=Cstd -library=Crun'
- fi
- ;;
- esac
- ;;
-esac
-
-
-case " $postdeps_CXX " in
-*" -lc "*) archive_cmds_need_lc_CXX=no ;;
-esac
- compiler_lib_search_dirs_CXX=
-if test -n "${compiler_lib_search_path_CXX}"; then
- compiler_lib_search_dirs_CXX=`echo " ${compiler_lib_search_path_CXX}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- lt_prog_compiler_wl_CXX=
-lt_prog_compiler_pic_CXX=
-lt_prog_compiler_static_CXX=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- # C++ specific cases for pic, static, wl, etc.
- if test "$GXX" = yes; then
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_static_CXX='-static'
-
- case $host_os in
- aix*)
- # All AIX code is PIC.
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- lt_prog_compiler_static_CXX='-Bstatic'
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- lt_prog_compiler_pic_CXX='-fPIC'
- ;;
- m68k)
- # FIXME: we need at least 68020 code to build shared libraries, but
- # adding the `-m68020' flag to GCC prevents building anything better,
- # like `-m68040'.
- lt_prog_compiler_pic_CXX='-m68020 -resident32 -malways-restore-a4'
- ;;
- esac
- ;;
-
- beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
- # PIC is the default for these OSes.
- ;;
- mingw* | cygwin* | os2* | pw32* | cegcc*)
- # This hack is so that the source file can tell whether it is being
- # built for inclusion in a dll (and should export symbols for example).
-      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
- # (--disable-auto-import) libraries
- lt_prog_compiler_pic_CXX='-DDLL_EXPORT'
- ;;
- darwin* | rhapsody*)
- # PIC is the default on this platform
- # Common symbols not allowed in MH_DYLIB files
- lt_prog_compiler_pic_CXX='-fno-common'
- ;;
- *djgpp*)
- # DJGPP does not support shared libraries at all
- lt_prog_compiler_pic_CXX=
- ;;
- interix[3-9]*)
- # Interix 3.x gcc -fpic/-fPIC options generate broken code.
- # Instead, we relocate shared libraries at runtime.
- ;;
- sysv4*MP*)
- if test -d /usr/nec; then
- lt_prog_compiler_pic_CXX=-Kconform_pic
- fi
- ;;
- hpux*)
- # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
- # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
- # sets the default TLS model and affects inlining.
- case $host_cpu in
- hppa*64*)
- ;;
- *)
- lt_prog_compiler_pic_CXX='-fPIC'
- ;;
- esac
- ;;
- *qnx* | *nto*)
-      # QNX uses GNU C++, but we need to define the -shared option too,
-      # otherwise it will core dump.
- lt_prog_compiler_pic_CXX='-fPIC -shared'
- ;;
- *)
- lt_prog_compiler_pic_CXX='-fPIC'
- ;;
- esac
- else
- case $host_os in
- aix[4-9]*)
- # All AIX code is PIC.
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- lt_prog_compiler_static_CXX='-Bstatic'
- else
- lt_prog_compiler_static_CXX='-bnso -bI:/lib/syscalls.exp'
- fi
- ;;
- chorus*)
- case $cc_basename in
- cxch68*)
- # Green Hills C++ Compiler
- # _LT_TAGVAR(lt_prog_compiler_static, CXX)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
- ;;
- esac
- ;;
- dgux*)
- case $cc_basename in
- ec++*)
- lt_prog_compiler_pic_CXX='-KPIC'
- ;;
- ghcx*)
- # Green Hills C++ Compiler
- lt_prog_compiler_pic_CXX='-pic'
- ;;
- *)
- ;;
- esac
- ;;
- freebsd* | dragonfly*)
- # FreeBSD uses GNU C++
- ;;
- hpux9* | hpux10* | hpux11*)
- case $cc_basename in
- CC*)
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_static_CXX='${wl}-a ${wl}archive'
- if test "$host_cpu" != ia64; then
- lt_prog_compiler_pic_CXX='+Z'
- fi
- ;;
- aCC*)
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_static_CXX='${wl}-a ${wl}archive'
- case $host_cpu in
- hppa*64*|ia64*)
- # +Z the default
- ;;
- *)
- lt_prog_compiler_pic_CXX='+Z'
- ;;
- esac
- ;;
- *)
- ;;
- esac
- ;;
- interix*)
- # This is c89, which is MS Visual C++ (no shared libs)
-      # Does anyone want to do a port?
- ;;
- irix5* | irix6* | nonstopux*)
- case $cc_basename in
- CC*)
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_static_CXX='-non_shared'
- # CC pic flag -KPIC is the default.
- ;;
- *)
- ;;
- esac
- ;;
- linux* | k*bsd*-gnu)
- case $cc_basename in
- KCC*)
- # KAI C++ Compiler
- lt_prog_compiler_wl_CXX='--backend -Wl,'
- lt_prog_compiler_pic_CXX='-fPIC'
- ;;
- ecpc* )
- # old Intel C++ for x86_64 which still supported -KPIC.
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_pic_CXX='-KPIC'
- lt_prog_compiler_static_CXX='-static'
- ;;
- icpc* )
- # Intel C++, used to be incompatible with GCC.
- # ICC 10 doesn't accept -KPIC any more.
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_pic_CXX='-fPIC'
- lt_prog_compiler_static_CXX='-static'
- ;;
- pgCC* | pgcpp*)
- # Portland Group C++ compiler
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_pic_CXX='-fpic'
- lt_prog_compiler_static_CXX='-Bstatic'
- ;;
- cxx*)
- # Compaq C++
- # Make sure the PIC flag is empty. It appears that all Alpha
- # Linux and Compaq Tru64 Unix objects are PIC.
- lt_prog_compiler_pic_CXX=
- lt_prog_compiler_static_CXX='-non_shared'
- ;;
- xlc* | xlC*)
- # IBM XL 8.0 on PPC
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_pic_CXX='-qpic'
- lt_prog_compiler_static_CXX='-qstaticlink'
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C++ 5.9
- lt_prog_compiler_pic_CXX='-KPIC'
- lt_prog_compiler_static_CXX='-Bstatic'
- lt_prog_compiler_wl_CXX='-Qoption ld '
- ;;
- esac
- ;;
- esac
- ;;
- lynxos*)
- ;;
- m88k*)
- ;;
- mvs*)
- case $cc_basename in
- cxx*)
- lt_prog_compiler_pic_CXX='-W c,exportall'
- ;;
- *)
- ;;
- esac
- ;;
- netbsd*)
- ;;
- *qnx* | *nto*)
-      # QNX uses GNU C++, but we need to define the -shared option too,
-      # otherwise it will core dump.
- lt_prog_compiler_pic_CXX='-fPIC -shared'
- ;;
- osf3* | osf4* | osf5*)
- case $cc_basename in
- KCC*)
- lt_prog_compiler_wl_CXX='--backend -Wl,'
- ;;
- RCC*)
- # Rational C++ 2.4.1
- lt_prog_compiler_pic_CXX='-pic'
- ;;
- cxx*)
- # Digital/Compaq C++
- lt_prog_compiler_wl_CXX='-Wl,'
- # Make sure the PIC flag is empty. It appears that all Alpha
- # Linux and Compaq Tru64 Unix objects are PIC.
- lt_prog_compiler_pic_CXX=
- lt_prog_compiler_static_CXX='-non_shared'
- ;;
- *)
- ;;
- esac
- ;;
- psos*)
- ;;
- solaris*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- lt_prog_compiler_pic_CXX='-KPIC'
- lt_prog_compiler_static_CXX='-Bstatic'
- lt_prog_compiler_wl_CXX='-Qoption ld '
- ;;
- gcx*)
- # Green Hills C++ Compiler
- lt_prog_compiler_pic_CXX='-PIC'
- ;;
- *)
- ;;
- esac
- ;;
- sunos4*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.x
- lt_prog_compiler_pic_CXX='-pic'
- lt_prog_compiler_static_CXX='-Bstatic'
- ;;
- lcc*)
- # Lucid
- lt_prog_compiler_pic_CXX='-pic'
- ;;
- *)
- ;;
- esac
- ;;
- sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
- case $cc_basename in
- CC*)
- lt_prog_compiler_wl_CXX='-Wl,'
- lt_prog_compiler_pic_CXX='-KPIC'
- lt_prog_compiler_static_CXX='-Bstatic'
- ;;
- esac
- ;;
- tandem*)
- case $cc_basename in
- NCC*)
- # NonStop-UX NCC 3.20
- lt_prog_compiler_pic_CXX='-KPIC'
- ;;
- *)
- ;;
- esac
- ;;
- vxworks*)
- ;;
- *)
- lt_prog_compiler_can_build_shared_CXX=no
- ;;
- esac
- fi
-
-case $host_os in
- # For platforms which do not support PIC, -DPIC is meaningless:
- *djgpp*)
- lt_prog_compiler_pic_CXX=
- ;;
- *)
- lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC"
- ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5
-$as_echo "$lt_prog_compiler_pic_CXX" >&6; }
-
-
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$lt_prog_compiler_pic_CXX"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5
-$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... " >&6; }
-if ${lt_cv_prog_compiler_pic_works_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_pic_works_CXX=no
- ac_outfile=conftest.$ac_objext
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
- lt_compiler_flag="$lt_prog_compiler_pic_CXX -DPIC"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- # The option is referenced via a variable to avoid confusing sed.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:13264: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>conftest.err)
- ac_status=$?
- cat conftest.err >&5
- echo "$as_me:13268: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s "$ac_outfile"; then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings other than the usual output.
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_pic_works_CXX=yes
- fi
- fi
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_CXX" >&5
-$as_echo "$lt_cv_prog_compiler_pic_works_CXX" >&6; }
-
-if test x"$lt_cv_prog_compiler_pic_works_CXX" = xyes; then
- case $lt_prog_compiler_pic_CXX in
- "" | " "*) ;;
- *) lt_prog_compiler_pic_CXX=" $lt_prog_compiler_pic_CXX" ;;
- esac
-else
- lt_prog_compiler_pic_CXX=
- lt_prog_compiler_can_build_shared_CXX=no
-fi
-
-fi
-
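The cached test above works by splicing the candidate PIC flag into the normal compile command (via the $SED expression) and rejecting the flag if the compiler emits anything beyond its usual boilerplate. A minimal standalone sketch of the same idea, assuming g++ and throwaway file names that are purely illustrative:

  # Compile a trivial translation unit with the candidate flag and treat
  # any diagnostic output as a rejection, mirroring the cached check above.
  echo 'int lt_pic_probe() { return 0; }' > conftest.cpp
  if g++ -c -fPIC -DPIC conftest.cpp -o conftest.o 2>conftest.err &&
     ! test -s conftest.err; then
    echo "PIC flag accepted"
  else
    echo "PIC flag rejected (error or unexpected warnings)"
  fi
  rm -f conftest.cpp conftest.o conftest.err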
-
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$lt_prog_compiler_wl_CXX eval lt_tmp_static_flag=\"$lt_prog_compiler_static_CXX\"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
-$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
-if ${lt_cv_prog_compiler_static_works_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_static_works_CXX=no
- save_LDFLAGS="$LDFLAGS"
- LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
- echo "$lt_simple_link_test_code" > conftest.$ac_ext
- if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
- # The linker can only warn and ignore the option if not recognized
- # So say no if there are warnings
- if test -s conftest.err; then
- # Append any errors to the config.log.
- cat conftest.err 1>&5
- $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if diff conftest.exp conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_static_works_CXX=yes
- fi
- else
- lt_cv_prog_compiler_static_works_CXX=yes
- fi
- fi
- $RM -r conftest*
- LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_CXX" >&5
-$as_echo "$lt_cv_prog_compiler_static_works_CXX" >&6; }
-
-if test x"$lt_cv_prog_compiler_static_works_CXX" = xyes; then
- :
-else
- lt_prog_compiler_static_CXX=
-fi
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_c_o_CXX=no
- $RM -r conftest 2>/dev/null
- mkdir conftest
- cd conftest
- mkdir out
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- lt_compiler_flag="-o out/conftest2.$ac_objext"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:13363: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>out/conftest.err)
- ac_status=$?
- cat out/conftest.err >&5
- echo "$as_me:13367: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s out/conftest2.$ac_objext
- then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp
- $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
- if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_c_o_CXX=yes
- fi
- fi
- chmod u+w . 2>&5
- $RM conftest*
- # SGI C++ compiler will create directory out/ii_files/ for
- # template instantiation
- test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
- $RM out/* && rmdir out
- cd ..
- $RM -r conftest
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5
-$as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; }
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o_CXX+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- lt_cv_prog_compiler_c_o_CXX=no
- $RM -r conftest 2>/dev/null
- mkdir conftest
- cd conftest
- mkdir out
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- lt_compiler_flag="-o out/conftest2.$ac_objext"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:13415: $lt_compile\"" >&5)
- (eval "$lt_compile" 2>out/conftest.err)
- ac_status=$?
- cat out/conftest.err >&5
- echo "$as_me:13419: \$? = $ac_status" >&5
- if (exit $ac_status) && test -s out/conftest2.$ac_objext
- then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp
- $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
- if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
- lt_cv_prog_compiler_c_o_CXX=yes
- fi
- fi
- chmod u+w . 2>&5
- $RM conftest*
- # SGI C++ compiler will create directory out/ii_files/ for
- # template instantiation
- test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
- $RM out/* && rmdir out
- cd ..
- $RM -r conftest
- $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5
-$as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; }
-
-
-
-
-hard_links="nottested"
-if test "$lt_cv_prog_compiler_c_o_CXX" = no && test "$need_locks" != no; then
- # do not overwrite the value of need_locks provided by the user
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
-$as_echo_n "checking if we can lock with hard links... " >&6; }
- hard_links=yes
- $RM conftest*
- ln conftest.a conftest.b 2>/dev/null && hard_links=no
- touch conftest.a
- ln conftest.a conftest.b 2>&5 || hard_links=no
- ln conftest.a conftest.b 2>/dev/null && hard_links=no
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
-$as_echo "$hard_links" >&6; }
- if test "$hard_links" = no; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
-$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
- need_locks=warn
- fi
-else
- need_locks=no
-fi
-
-
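The locking decision above hinges on whether the filesystem supports hard links: when the compiler cannot handle -c -o, lock files created with ln are used to serialize parallel compiles. A small sketch of the probe, with illustrative temporary names (the real test above also checks that linking a nonexistent file fails and that a second link onto an existing name fails):

  touch conftest.a
  if ln conftest.a conftest.b 2>/dev/null; then
    echo "hard links available: lock files will work"
  else
    echo "no hard links here: make -j with this compiler may be unsafe"
  fi
  rm -f conftest.a conftest.b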
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
- case $host_os in
- aix[4-9]*)
- # If we're using GNU nm, then we don't want the "-C" option.
- # To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
- if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
- export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- else
- export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- fi
- ;;
- pw32*)
- export_symbols_cmds_CXX="$ltdll_cmds"
- ;;
- cygwin* | mingw* | cegcc*)
- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
- ;;
- *)
- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
- ;;
- esac
- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5
-$as_echo "$ld_shlibs_CXX" >&6; }
-test "$ld_shlibs_CXX" = no && can_build_shared=no
-
-with_gnu_ld_CXX=$with_gnu_ld
-
-
-
-
-
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$archive_cmds_need_lc_CXX" in
-x|xyes)
- # Assume -lc should be added
- archive_cmds_need_lc_CXX=yes
-
- if test "$enable_shared" = yes && test "$GCC" = yes; then
- case $archive_cmds_CXX in
- *'~'*)
- # FIXME: we may have to deal with multi-command sequences.
- ;;
- '$CC '*)
- # Test whether the compiler implicitly links with -lc since on some
- # systems, -lgcc has to come before -lc. If gcc already passes -lc
- # to ld, don't add -lc before -lgcc.
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
-$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
- $RM conftest*
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } 2>conftest.err; then
- soname=conftest
- lib=conftest
- libobjs=conftest.$ac_objext
- deplibs=
- wl=$lt_prog_compiler_wl_CXX
- pic_flag=$lt_prog_compiler_pic_CXX
- compiler_flags=-v
- linker_flags=-v
- verstring=
- output_objdir=.
- libname=conftest
- lt_save_allow_undefined_flag=$allow_undefined_flag_CXX
- allow_undefined_flag_CXX=
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
- (eval $archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
- ac_status=$?
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
- then
- archive_cmds_need_lc_CXX=no
- else
- archive_cmds_need_lc_CXX=yes
- fi
- allow_undefined_flag_CXX=$lt_save_allow_undefined_flag
- else
- cat conftest.err 1>&5
- fi
- $RM conftest*
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $archive_cmds_need_lc_CXX" >&5
-$as_echo "$archive_cmds_need_lc_CXX" >&6; }
- ;;
- esac
- fi
- ;;
-esac
-
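The -lc probe above relies on the fact that a verbose link step prints the full command handed to the linker; if " -lc " already appears there, adding it again is unnecessary. A rough standalone equivalent, assuming g++ on a GNU/Linux host and illustrative temporary names:

  # Build a trivial PIC object, then inspect the verbose link line for -lc.
  echo 'int main() { return 0; }' > conftest.cpp
  g++ -c -fPIC conftest.cpp -o conftest.o
  if g++ -v -shared conftest.o -o conftest.so 2>&1 | grep ' -lc ' >/dev/null; then
    echo "the compiler already passes -lc to the linker"
  else
    echo "-lc may need to be added explicitly"
  fi
  rm -f conftest.cpp conftest.o conftest.so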
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
-$as_echo_n "checking dynamic linker characteristics... " >&6; }
-
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
- shlibpath_var=LIBPATH
-
- # AIX 3 has no versioning support, so we append a major version to the name.
- soname_spec='${libname}${release}${shared_ext}$major'
- ;;
-
-aix[4-9]*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- hardcode_into_libs=yes
- if test "$host_cpu" = ia64; then
- # AIX 5 supports IA64
- library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- else
- # With GCC up to 2.95.x, collect2 would create an import file
- # for dependent libraries. The import file would start with
- # the line `#! .'. This would cause the generated library to
- # depend on `.', always an invalid library. This was fixed in
- # development snapshots of GCC prior to 3.0.
- case $host_os in
- aix4 | aix4.[01] | aix4.[01].*)
- if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
- echo ' yes '
- echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
- :
- else
- can_build_shared=no
- fi
- ;;
- esac
- # AIX (on Power*) has no versioning support, so currently we cannot hardcode the correct
- # soname into the executable. We could probably add versioning support to
- # collect2, so additional links may be useful in the future.
- if test "$aix_use_runtimelinking" = yes; then
- # If using run time linking (on AIX 4.2 or later) use lib<name>.so
- # instead of lib<name>.a to let people know that these are not
- # typical AIX shared libraries.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- else
- # We preserve .a as extension for shared libraries through AIX4.2
- # and later when we are not doing run time linking.
- library_names_spec='${libname}${release}.a $libname.a'
- soname_spec='${libname}${release}${shared_ext}$major'
- fi
- shlibpath_var=LIBPATH
- fi
- ;;
-
-amigaos*)
- case $host_cpu in
- powerpc)
- # Since July 2007 AmigaOS4 officially supports .so libraries.
- # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- ;;
- m68k)
- library_names_spec='$libname.ixlibrary $libname.a'
- # Create ${libname}_ixlibrary.a entries in /sys/libs.
- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
- ;;
- esac
- ;;
-
-beos*)
- library_names_spec='${libname}${shared_ext}'
- dynamic_linker="$host_os ld.so"
- shlibpath_var=LIBRARY_PATH
- ;;
-
-bsdi[45]*)
- version_type=linux
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
- sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
- # the default ld.so.conf also contains /usr/contrib/lib and
- # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
- # libtool to hard-code these into programs
- ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
- version_type=windows
- shrext_cmds=".dll"
- need_version=no
- need_lib_prefix=no
-
- case $GCC,$host_os in
- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
- dldir=$destdir/`dirname \$dlpath`~
- test -d \$dldir || mkdir -p \$dldir~
- $install_prog $dir/$dlname \$dldir/$dlname~
- chmod a+x \$dldir/$dlname~
- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
- fi'
- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
- dlpath=$dir/\$dldll~
- $RM \$dlpath'
- shlibpath_overrides_runpath=yes
-
- case $host_os in
- cygwin*)
- # Cygwin DLLs use 'cyg' prefix rather than 'lib'
- soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib"
- ;;
- mingw* | cegcc*)
- # MinGW DLLs use traditional 'lib' prefix
- soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"`
- if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
- # It is most probably a Windows format PATH printed by
- # mingw gcc, but we are running on Cygwin. Gcc prints its search
- # path with ; separators, and with drive letters. We can handle the
- # drive letters (cygwin fileutils understands them), so leave them,
- # especially as we might pass files found there to a mingw objdump,
- # which wouldn't understand a cygwinified path. Ahh.
- sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
- else
- sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
- fi
- ;;
- pw32*)
- # pw32 DLLs use 'pw' prefix rather than 'lib'
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
- ;;
-
- *)
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
- ;;
- esac
- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-
-darwin* | rhapsody*)
- dynamic_linker="$host_os dyld"
- version_type=darwin
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
- soname_spec='${libname}${release}${major}$shared_ext'
- shlibpath_overrides_runpath=yes
- shlibpath_var=DYLD_LIBRARY_PATH
- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-
- sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
- ;;
-
-dgux*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- ;;
-
-freebsd1*)
- dynamic_linker=no
- ;;
-
-freebsd* | dragonfly*)
- # DragonFly does not have aout. When/if they implement a new
- # versioning mechanism, adjust this.
- if test -x /usr/bin/objformat; then
- objformat=`/usr/bin/objformat`
- else
- case $host_os in
- freebsd[123]*) objformat=aout ;;
- *) objformat=elf ;;
- esac
- fi
- version_type=freebsd-$objformat
- case $version_type in
- freebsd-elf*)
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
- need_version=no
- need_lib_prefix=no
- ;;
- freebsd-*)
- library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
- need_version=yes
- ;;
- esac
- shlibpath_var=LD_LIBRARY_PATH
- case $host_os in
- freebsd2*)
- shlibpath_overrides_runpath=yes
- ;;
- freebsd3.[01]* | freebsdelf3.[01]*)
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
- freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
- freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
- *) # from 4.6 on, and DragonFly
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
- esac
- ;;
-
-gnu*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- hardcode_into_libs=yes
- ;;
-
-hpux9* | hpux10* | hpux11*)
- # Give a soname corresponding to the major version so that dld.sl refuses to
- # link against other versions.
- version_type=sunos
- need_lib_prefix=no
- need_version=no
- case $host_cpu in
- ia64*)
- shrext_cmds='.so'
- hardcode_into_libs=yes
- dynamic_linker="$host_os dld.so"
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- if test "X$HPUX_IA64_MODE" = X32; then
- sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
- else
- sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
- fi
- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
- ;;
- hppa*64*)
- shrext_cmds='.sl'
- hardcode_into_libs=yes
- dynamic_linker="$host_os dld.sl"
- shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
- shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
- ;;
- *)
- shrext_cmds='.sl'
- dynamic_linker="$host_os dld.sl"
- shlibpath_var=SHLIB_PATH
- shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- ;;
- esac
- # HP-UX runs *really* slowly unless shared libraries are mode 555.
- postinstall_cmds='chmod 555 $lib'
- ;;
-
-interix[3-9]*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
-
-irix5* | irix6* | nonstopux*)
- case $host_os in
- nonstopux*) version_type=nonstopux ;;
- *)
- if test "$lt_cv_prog_gnu_ld" = yes; then
- version_type=linux
- else
- version_type=irix
- fi ;;
- esac
- need_lib_prefix=no
- need_version=no
- soname_spec='${libname}${release}${shared_ext}$major'
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
- case $host_os in
- irix5* | nonstopux*)
- libsuff= shlibsuff=
- ;;
- *)
- case $LD in # libtool.m4 will add one of these switches to LD
- *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
- libsuff= shlibsuff= libmagic=32-bit;;
- *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
- libsuff=32 shlibsuff=N32 libmagic=N32;;
- *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
- libsuff=64 shlibsuff=64 libmagic=64-bit;;
- *) libsuff= shlibsuff= libmagic=never-match;;
- esac
- ;;
- esac
- shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
- shlibpath_overrides_runpath=no
- sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
- sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
- hardcode_into_libs=yes
- ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
- dynamic_linker=no
- ;;
-
-# This must be Linux ELF.
-linux* | k*bsd*-gnu)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- # Some versions of binutils ld are patched to set DT_RUNPATH
- save_LDFLAGS=$LDFLAGS
- save_libdir=$libdir
- eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_CXX\"; \
- LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_CXX\""
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_link "$LINENO"; then :
- if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
- shlibpath_overrides_runpath=yes
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
- LDFLAGS=$save_LDFLAGS
- libdir=$save_libdir
-
- # This implies no fast_install, which is unacceptable.
- # Some rework will be needed to allow for fast_install
- # before this can be enabled.
- hardcode_into_libs=yes
-
- # Append ld.so.conf contents to the search path
- if test -f /etc/ld.so.conf; then
- lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '`
- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
- fi
-
- # We used to test for /lib/ld.so.1 and disable shared libraries on
- # powerpc, because MkLinux only supported shared libraries with the
- # GNU dynamic linker. Since that test was broken with cross compilers,
- # most powerpc-linux boxes now support dynamic linking, and people can
- # always pass --disable-shared, the test was removed and we simply
- # assume the GNU/Linux dynamic linker is in use.
- dynamic_linker='GNU/Linux ld.so'
- ;;
-
-netbsd*)
- version_type=sunos
- need_lib_prefix=no
- need_version=no
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
- dynamic_linker='NetBSD (a.out) ld.so'
- else
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- dynamic_linker='NetBSD ld.elf_so'
- fi
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
-
-newsos6)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- ;;
-
-*nto* | *qnx*)
- version_type=qnx
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- dynamic_linker='ldqnx.so'
- ;;
-
-openbsd*)
- version_type=sunos
- sys_lib_dlsearch_path_spec="/usr/lib"
- need_lib_prefix=no
- # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
- case $host_os in
- openbsd3.3 | openbsd3.3.*) need_version=yes ;;
- *) need_version=no ;;
- esac
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- case $host_os in
- openbsd2.[89] | openbsd2.[89].*)
- shlibpath_overrides_runpath=no
- ;;
- *)
- shlibpath_overrides_runpath=yes
- ;;
- esac
- else
- shlibpath_overrides_runpath=yes
- fi
- ;;
-
-os2*)
- libname_spec='$name'
- shrext_cmds=".dll"
- need_lib_prefix=no
- library_names_spec='$libname${shared_ext} $libname.a'
- dynamic_linker='OS/2 ld.exe'
- shlibpath_var=LIBPATH
- ;;
-
-osf3* | osf4* | osf5*)
- version_type=osf
- need_lib_prefix=no
- need_version=no
- soname_spec='${libname}${release}${shared_ext}$major'
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
- sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
- ;;
-
-rdos*)
- dynamic_linker=no
- ;;
-
-solaris*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- # ldd complains unless libraries are executable
- postinstall_cmds='chmod +x $lib'
- ;;
-
-sunos4*)
- version_type=sunos
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- if test "$with_gnu_ld" = yes; then
- need_lib_prefix=no
- fi
- need_version=yes
- ;;
-
-sysv4 | sysv4.3*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- case $host_vendor in
- sni)
- shlibpath_overrides_runpath=no
- need_lib_prefix=no
- runpath_var=LD_RUN_PATH
- ;;
- siemens)
- need_lib_prefix=no
- ;;
- motorola)
- need_lib_prefix=no
- need_version=no
- shlibpath_overrides_runpath=no
- sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
- ;;
- esac
- ;;
-
-sysv4*MP*)
- if test -d /usr/nec ;then
- version_type=linux
- library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
- soname_spec='$libname${shared_ext}.$major'
- shlibpath_var=LD_LIBRARY_PATH
- fi
- ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
- version_type=freebsd-elf
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- if test "$with_gnu_ld" = yes; then
- sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
- else
- sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
- case $host_os in
- sco3.2v5*)
- sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
- ;;
- esac
- fi
- sys_lib_dlsearch_path_spec='/usr/lib'
- ;;
-
-tpf*)
- # TPF is a cross-target only. Preferred cross-host = GNU/Linux.
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
-
-uts4*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- ;;
-
-*)
- dynamic_linker=no
- ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
-$as_echo "$dynamic_linker" >&6; }
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
- variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
- sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
- sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
-
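As a worked example of how the linux* specs above expand, take a library whose base name is libfoo, with soname suffix .1 and full version suffix .1.2.3 (these concrete values, leading dots included, are only an illustration of what libtool substitutes at link time):

  libname=libfoo; release=; shared_ext=.so; major=.1; versuffix=.1.2.3
  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
  soname_spec='${libname}${release}${shared_ext}$major'
  eval echo "library names: $library_names_spec"   # libfoo.so.1.2.3 libfoo.so.1 libfoo.so
  eval echo "soname: $soname_spec"                 # libfoo.so.1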
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
-$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
-hardcode_action_CXX=
-if test -n "$hardcode_libdir_flag_spec_CXX" ||
- test -n "$runpath_var_CXX" ||
- test "X$hardcode_automatic_CXX" = "Xyes" ; then
-
- # We can hardcode non-existent directories.
- if test "$hardcode_direct_CXX" != no &&
- # If the only mechanism to avoid hardcoding is shlibpath_var, we
- # have to relink, otherwise we might link with an installed library
- # when we should be linking with a yet-to-be-installed one
- ## test "$_LT_TAGVAR(hardcode_shlibpath_var, CXX)" != no &&
- test "$hardcode_minus_L_CXX" != no; then
- # Linking always hardcodes the temporary library directory.
- hardcode_action_CXX=relink
- else
- # We can link without hardcoding, and we can hardcode nonexisting dirs.
- hardcode_action_CXX=immediate
- fi
-else
- # We cannot hardcode anything, or else we can only hardcode existing
- # directories.
- hardcode_action_CXX=unsupported
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_CXX" >&5
-$as_echo "$hardcode_action_CXX" >&6; }
-
-if test "$hardcode_action_CXX" = relink ||
- test "$inherit_rpath_CXX" = yes; then
- # Fast installation is not supported
- enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
- test "$enable_shared" = no; then
- # Fast installation is not necessary
- enable_fast_install=needless
-fi
-
-
-
-
-
-
-
- fi # test -n "$compiler"
-
- CC=$lt_save_CC
- LDCXX=$LD
- LD=$lt_save_LD
- GCC=$lt_save_GCC
- with_gnu_ld=$lt_save_with_gnu_ld
- lt_cv_path_LDCXX=$lt_cv_path_LD
- lt_cv_path_LD=$lt_save_path_LD
- lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
- lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
-fi # test "$_lt_caught_CXX_error" != yes
-
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-
-
-
-
-
-
-
-
-
-
-
-
- ac_config_commands="$ac_config_commands libtool"
-
-
-
-
-# Only expand once:
-
-
-
-# Checks for libraries.
-
-echo "$as_me: this is boost.m4 serial 16" >&5
-boost_save_IFS=$IFS
-boost_version_req=
-IFS=.
-set x $boost_version_req 0 0 0
-IFS=$boost_save_IFS
-shift
-boost_version_req=`expr "$1" '*' 100000 + "$2" '*' 100 + "$3"`
-boost_version_req_string=$1.$2.$3
-
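The arithmetic above packs a dotted version into a single comparable integer: major*100000 + minor*100 + patch. In this configure run boost_version_req is empty, so the padding zeros make the requirement effectively 0.0.0; for a concrete, purely illustrative requirement of 1.46.1 the encoding works out as follows:

  # Encode "1.46.1" exactly the way the snippet above does.
  v=1.46.1
  save_IFS=$IFS; IFS=.
  set x $v 0 0 0
  IFS=$save_IFS
  shift
  expr "$1" '*' 100000 + "$2" '*' 100 + "$3"   # prints 104601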
-# Check whether --with-boost was given.
-if test "${with_boost+set}" = set; then :
- withval=$with_boost;
-fi
-# If BOOST_ROOT is set and the user has not provided a value to
-# --with-boost, then treat BOOST_ROOT as if the user had supplied it.
-if test x"$BOOST_ROOT" != x; then
- if test x"$with_boost" = x; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: Detected BOOST_ROOT; continuing with --with-boost=$BOOST_ROOT" >&5
-$as_echo "$as_me: Detected BOOST_ROOT; continuing with --with-boost=$BOOST_ROOT" >&6;}
- with_boost=$BOOST_ROOT
- else
- { $as_echo "$as_me:${as_lineno-$LINENO}: Detected BOOST_ROOT=$BOOST_ROOT, but overridden by --with-boost=$with_boost" >&5
-$as_echo "$as_me: Detected BOOST_ROOT=$BOOST_ROOT, but overridden by --with-boost=$with_boost" >&6;}
- fi
-fi
-DISTCHECK_CONFIGURE_FLAGS="$DISTCHECK_CONFIGURE_FLAGS '--with-boost=$with_boost'"
-boost_save_CPPFLAGS=$CPPFLAGS
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Boost headers version >= $boost_version_req_string" >&5
-$as_echo_n "checking for Boost headers version >= $boost_version_req_string... " >&6; }
-if ${boost_cv_inc_path+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- boost_cv_inc_path=no
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <boost/version.hpp>
-#if !defined BOOST_VERSION
-# error BOOST_VERSION is not defined
-#elif BOOST_VERSION < $boost_version_req
-# error Boost headers version < $boost_version_req
-#endif
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
- # If the user provided a value to --with-boost, use it and only it.
- case $with_boost in #(
- ''|yes) set x '' /opt/local/include /usr/local/include /opt/include \
- /usr/include C:/Boost/include;; #(
- *) set x "$with_boost/include" "$with_boost";;
- esac
- shift
- for boost_dir
- do
- # Without --layout=system, Boost (or at least some versions) installs
- # itself in <prefix>/include/boost-<version>. This inner loop helps to
- # find headers in such directories.
- #
- # Any ${boost_dir}/boost-x_xx directories are searched in reverse version
- # order, followed by ${boost_dir}. The final '.' is a sentinel for
- # searching ${boost_dir} itself. Entries are whitespace separated.
- #
- # This loop is deliberately left unindented (to avoid over-indented code).
- boost_layout_system_search_list=`cd "$boost_dir" 2>/dev/null \
- && ls -1 | "${GREP}" '^boost-' | sort -rn -t- -k2 \
- && echo .`
- for boost_inc in $boost_layout_system_search_list
- do
- if test x"$boost_inc" != x.; then
- boost_inc="$boost_dir/$boost_inc"
- else
- boost_inc="$boost_dir" # Uses sentinel in boost_layout_system_search_list
- fi
- if test x"$boost_inc" != x; then
- # We are going to check whether the version of Boost installed
- # in $boost_inc is usable by running a compilation that
- # #includes it. But if we pass a -I/some/path in which Boost
- # is not installed, the compiler will just skip this -I and
- # use other locations (either from CPPFLAGS, or from its list
- # of system include directories). As a result we would use a
- # header installed on the machine instead of the /some/path
- # specified by the user. So in that precise case (trying
- # $boost_inc), make sure the version.hpp exists.
- #
- # Use test -e as there can be symlinks.
- test -e "$boost_inc/boost/version.hpp" || continue
- CPPFLAGS="$CPPFLAGS -I$boost_inc"
- fi
- if ac_fn_cxx_try_compile "$LINENO"; then :
- boost_cv_inc_path=yes
-else
- boost_cv_version=no
-fi
-rm -f core conftest.err conftest.$ac_objext
- if test x"$boost_cv_inc_path" = xyes; then
- if test x"$boost_inc" != x; then
- boost_cv_inc_path=$boost_inc
- fi
- break 2
- fi
- done
- done
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $boost_cv_inc_path" >&5
-$as_echo "$boost_cv_inc_path" >&6; }
- case $boost_cv_inc_path in #(
- no)
- boost_errmsg="cannot find Boost headers version >= $boost_version_req_string"
- as_fn_error $? "$boost_errmsg" "$LINENO" 5
-
- ;;#(
- yes)
- BOOST_CPPFLAGS=
- ;;#(
- *)
- BOOST_CPPFLAGS="-I$boost_cv_inc_path"
- ;;
- esac
- if test x"$boost_cv_inc_path" != xno; then
-
-$as_echo "#define HAVE_BOOST 1" >>confdefs.h
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Boost's header version" >&5
-$as_echo_n "checking for Boost's header version... " >&6; }
-if ${boost_cv_lib_version+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <boost/version.hpp>
-boost-lib-version = BOOST_LIB_VERSION
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
- tr -d '\r' |
- $SED -n -e "/^boost-lib-version = /{s///;s/\"//g;p;q;}" >conftest.i 2>&1; then :
- boost_cv_lib_version=`cat conftest.i`
-fi
-rm -rf conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $boost_cv_lib_version" >&5
-$as_echo "$boost_cv_lib_version" >&6; }
- # e.g. "134" for 1_34_1 or "135" for 1_35
- boost_major_version=`echo "$boost_cv_lib_version" | sed 's/_//;s/_.*//'`
- case $boost_major_version in #(
- '' | *[!0-9]*)
- as_fn_error $? "invalid value: boost_major_version=$boost_major_version" "$LINENO" 5
- ;;
- esac
-fi
-CPPFLAGS=$boost_save_CPPFLAGS
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for the toolset name used by Boost for $CXX" >&5
-$as_echo_n "checking for the toolset name used by Boost for $CXX... " >&6; }
-if ${boost_cv_lib_tag+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- boost_cv_lib_tag=unknown
-if test x$boost_cv_inc_path != xno; then
- ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
- # The following tests are mostly inspired by boost/config/auto_link.hpp
- # The list is sorted from most recent/common to oldest compiler (in order
- # to increase the likelihood of finding the right compiler with the
- # least number of compilation attempts).
- # Beware that some tests are sensitive to the order (for instance, we must
- # look for MinGW before looking for GCC3).
- # I used one compilation test per compiler with a #error to recognize
- # each compiler so that it works even when cross-compiling (let me know
- # if you know a better approach).
- # Known missing tags (known from Boost's tools/build/v2/tools/common.jam):
- # como, edg, kcc, bck, mp, sw, tru, xlc
- # I'm not sure about my test for `il' (be careful: Intel's ICC pre-defines
- # the same defines as GCC's).
- for i in \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 6 && !defined __ICC @ gcc46" \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 5 && !defined __ICC @ gcc45" \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 4 && !defined __ICC @ gcc44" \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 3 && !defined __ICC @ gcc43" \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && !defined __ICC @ gcc42" \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 1 && !defined __ICC @ gcc41" \
- "defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 0 && !defined __ICC @ gcc40" \
- "defined __GNUC__ && __GNUC__ == 3 && !defined __ICC \
- && (defined WIN32 || defined WINNT || defined _WIN32 || defined __WIN32 \
- || defined __WIN32__ || defined __WINNT || defined __WINNT__) @ mgw" \
- "defined __GNUC__ && __GNUC__ == 3 && __GNUC_MINOR__ == 4 && !defined __ICC @ gcc34" \
- "defined __GNUC__ && __GNUC__ == 3 && __GNUC_MINOR__ == 3 && !defined __ICC @ gcc33" \
- "defined _MSC_VER && _MSC_VER >= 1500 @ vc90" \
- "defined _MSC_VER && _MSC_VER == 1400 @ vc80" \
- "defined __GNUC__ && __GNUC__ == 3 && __GNUC_MINOR__ == 2 && !defined __ICC @ gcc32" \
- "defined _MSC_VER && _MSC_VER == 1310 @ vc71" \
- "defined __GNUC__ && __GNUC__ == 3 && __GNUC_MINOR__ == 1 && !defined __ICC @ gcc31" \
- "defined __GNUC__ && __GNUC__ == 3 && __GNUC_MINOR__ == 0 && !defined __ICC @ gcc30" \
- "defined __BORLANDC__ @ bcb" \
- "defined __ICC && (defined __unix || defined ) @ il" \
- "defined __ICL @ iw" \
- "defined _MSC_VER && _MSC_VER == 1300 @ vc7" \
- "defined __GNUC__ && __GNUC__ == 2 && __GNUC_MINOR__ == 95 && !defined __ICC @ gcc295" \
- "defined __MWERKS__ && __MWERKS__ <= 0x32FF @ cw9" \
- "defined _MSC_VER && _MSC_VER < 1300 && !defined UNDER_CE @ vc6" \
- "defined _MSC_VER && _MSC_VER < 1300 && defined UNDER_CE @ evc4" \
- "defined __MWERKS__ && __MWERKS__ <= 0x31FF @ cw8"
- do
- boost_tag_test=`expr "X$i" : 'X\([^@]*\) @ '`
- boost_tag=`expr "X$i" : 'X[^@]* @ \(.*\)'`
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-#if $boost_tag_test
-/* OK */
-#else
-# error $boost_tag_test
-#endif
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- boost_cv_lib_tag=$boost_tag; break
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
- done
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
- case $boost_cv_lib_tag in #(
- # Some newer (>= 1.35?) versions of Boost seem to only use "gcc" as opposed
- # to "gcc41" for instance.
- *-gcc | *'-gcc ') :;; #( Don't re-add -gcc: it's already in there.
- gcc*)
- boost_tag_x=
- case $host_os in #(
- darwin*)
- if test $boost_major_version -ge 136; then
- # The `x' added in r46793 of Boost.
- boost_tag_x=x
- fi;;
- esac
- # We can specify multiple tags in this variable because it's used by
- # BOOST_FIND_LIB that does a `for tag in -$boost_cv_lib_tag' ...
- boost_cv_lib_tag="$boost_tag_x$boost_cv_lib_tag -${boost_tag_x}gcc"
- ;; #(
- unknown)
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not figure out which toolset name to use for $CXX" >&5
-$as_echo "$as_me: WARNING: could not figure out which toolset name to use for $CXX" >&2;}
- boost_cv_lib_tag=
- ;;
- esac
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $boost_cv_lib_tag" >&5
-$as_echo "$boost_cv_lib_tag" >&6; }
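Each candidate toolset above is recognized with a single preprocessor probe: compile a file that #errors out unless the expected predefined macros are present, and take the tag of the first probe that compiles. A standalone sketch for one probe, assuming g++ and using gcc44 purely as an example tag:

  # Write a probe that only compiles when the gcc44 macros are predefined.
  {
    echo '#if defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ == 4 && !defined __ICC'
    echo '/* OK */'
    echo '#else'
    echo '# error not gcc44'
    echo '#endif'
    echo 'int main() { return 0; }'
  } > conftest.cpp
  if g++ -c conftest.cpp -o conftest.o 2>/dev/null; then
    echo "toolset tag: gcc44"
  fi
  rm -f conftest.cpp conftest.o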
-# Check whether --enable-static-boost was given.
-if test "${enable_static_boost+set}" = set; then :
- enableval=$enable_static_boost; enable_static_boost=yes
-else
- enable_static_boost=no
-fi
-
-# Check whether we'd do better to use `mt' even though we weren't asked to.
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-#if defined _REENTRANT || defined _MT || defined __MT__
-/* use -mt */
-#else
-# error MT not needed
-#endif
-
-int
-main ()
-{
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- boost_guess_use_mt=:
-else
- boost_guess_use_mt=false
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-if test x"$boost_cv_inc_path" = xno; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: Boost not available, not searching for the Boost program_options library" >&5
-$as_echo "$as_me: Boost not available, not searching for the Boost program_options library" >&6;}
-else
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-if test x"$boost_cv_inc_path" = xno; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: Boost not available, not searching for boost/program_options.hpp" >&5
-$as_echo "$as_me: Boost not available, not searching for boost/program_options.hpp" >&6;}
-else
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-ac_fn_cxx_check_header_mongrel "$LINENO" "boost/program_options.hpp" "ac_cv_header_boost_program_options_hpp" "$ac_includes_default"
-if test "x$ac_cv_header_boost_program_options_hpp" = xyes; then :
-
-$as_echo "#define HAVE_BOOST_PROGRAM_OPTIONS_HPP 1" >>confdefs.h
-
-else
- as_fn_error $? "cannot find boost/program_options.hpp" "$LINENO" 5
-fi
-
-
-CPPFLAGS=$boost_save_CPPFLAGS
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-fi
-
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-# Now let's try to find the library. The algorithm is as follows: first look
-# for a given library name according to the user's PREFERRED-RT-OPT. For each
-# library name, we prefer to use the ones that carry the tag (toolset name).
-# Each library is searched through the various standard paths where Boost is
-# usually installed. If we can't find the standard variants, we try to
-# enforce -mt (for instance on Mac OS X, libboost_threads.dylib doesn't exist
-# but there is, obviously, libboost_threads-mt.dylib).
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for the Boost program_options library" >&5
-$as_echo_n "checking for the Boost program_options library... " >&6; }
-if ${boost_cv_lib_program_options+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- boost_cv_lib_program_options=no
- case "" in #(
- mt | mt-) boost_mt=-mt; boost_rtopt=;; #(
- mt* | mt-*) boost_mt=-mt; boost_rtopt=`expr "X" : 'Xmt-*\(.*\)'`;; #(
- *) boost_mt=; boost_rtopt=;;
- esac
- if test $enable_static_boost = yes; then
- boost_rtopt="s$boost_rtopt"
- fi
- # Find the proper debug variant depending on what we've been asked to find.
- case $boost_rtopt in #(
- *d*) boost_rt_d=$boost_rtopt;; #(
- *[sgpn]*) # Insert the `d' at the right place (in between `sg' and `pn')
- boost_rt_d=`echo "$boost_rtopt" | sed 's/\(s*g*\)\(p*n*\)/\1\2/'`;; #(
- *) boost_rt_d='-d';;
- esac
- # If PREFERRED-RT-OPT is not empty, prepend a `-'.
- test -n "$boost_rtopt" && boost_rtopt="-$boost_rtopt"
- $boost_guess_use_mt && boost_mt=-mt
- # Look for the absolute path of the static archive.
- # $libext is computed by Libtool, but let's make sure it's non-empty.
- test -z "$libext" &&
- as_fn_error $? "the libext variable is empty, did you invoke Libtool?" "$LINENO" 5
- boost_save_ac_objext=$ac_objext
- # Generate the test file.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <boost/program_options.hpp>
-
-int
-main ()
-{
-boost::program_options::options_description d("test");
- ;
- return 0;
-}
-_ACEOF
- if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_objext=do_not_rm_me_plz
-else
- as_fn_error $? "cannot compile a test that uses Boost program_options" "$LINENO" 5
-fi
-rm -f core conftest.err conftest.$ac_objext
- ac_objext=$boost_save_ac_objext
- boost_failed_libs=
-# Don't bother to indent the 6 nested for loops, only the 2 innermost ones
-# matter.
-for boost_tag_ in -$boost_cv_lib_tag ''; do
-for boost_ver_ in -$boost_cv_lib_version ''; do
-for boost_mt_ in $boost_mt -mt ''; do
-for boost_rtopt_ in $boost_rtopt '' -d; do
- for boost_lib in \
- boost_program_options$boost_tag_$boost_mt_$boost_rtopt_$boost_ver_ \
- boost_program_options$boost_tag_$boost_rtopt_$boost_ver_ \
- boost_program_options$boost_tag_$boost_mt_$boost_ver_ \
- boost_program_options$boost_tag_$boost_ver_
- do
- # Avoid testing the same lib twice
- case $boost_failed_libs in #(
- *@$boost_lib@*) continue;;
- esac
- # If with_boost is empty, we'll search in /lib first, which is not quite
- # right, so instead we'll try a location based on where the headers are.
- boost_tmp_lib=$with_boost
- test x"$with_boost" = x && boost_tmp_lib=${boost_cv_inc_path%/include}
- for boost_ldpath in "$boost_tmp_lib/lib" '' \
- /opt/local/lib* /usr/local/lib* /opt/lib* /usr/lib* \
- "$with_boost" C:/Boost/lib /lib*
- do
- test -e "$boost_ldpath" || continue
- boost_save_LDFLAGS=$LDFLAGS
- # Are we looking for a static library?
- case $boost_ldpath:$boost_rtopt_ in #(
- *?*:*s*) # Yes (Non empty boost_ldpath + s in rt opt)
- boost_cv_lib_program_options_LIBS="$boost_ldpath/lib$boost_lib.$libext"
- test -e "$boost_cv_lib_program_options_LIBS" || continue;; #(
- *) # No: use -lboost_foo to find the shared library.
- boost_cv_lib_program_options_LIBS="-l$boost_lib";;
- esac
- boost_save_LIBS=$LIBS
- LIBS="$boost_cv_lib_program_options_LIBS $LIBS"
- test x"$boost_ldpath" != x && LDFLAGS="$LDFLAGS -L$boost_ldpath"
- rm -f conftest$ac_exeext
-boost_save_ac_ext=$ac_ext
-boost_use_source=:
-# If we already have a .o, re-use it. We change $ac_ext so that $ac_link
-# tries to link the existing object file instead of compiling from source.
-test -f conftest.$ac_objext && ac_ext=$ac_objext && boost_use_source=false &&
- $as_echo "$as_me:${as_lineno-$LINENO}: re-using the existing conftest.$ac_objext" >&5
-if { { ac_try="$ac_link"
-case "(($ac_try" in
- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
- *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
- (eval "$ac_link") 2>conftest.err
- ac_status=$?
- if test -s conftest.err; then
- grep -v '^ *+' conftest.err >conftest.er1
- cat conftest.er1 >&5
- mv -f conftest.er1 conftest.err
- fi
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; } && {
- test -z "$ac_cxx_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest$ac_exeext && {
- test "$cross_compiling" = yes ||
- $as_executable_p conftest$ac_exeext
- }; then :
- boost_cv_lib_program_options=yes
-else
- if $boost_use_source; then
- $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- fi
- boost_cv_lib_program_options=no
-fi
-ac_objext=$boost_save_ac_objext
-ac_ext=$boost_save_ac_ext
-rm -f core conftest.err conftest_ipa8_conftest.oo \
- conftest$ac_exeext
- ac_objext=$boost_save_ac_objext
- LDFLAGS=$boost_save_LDFLAGS
- LIBS=$boost_save_LIBS
- if test x"$boost_cv_lib_program_options" = xyes; then
- boost_cv_lib_program_options_LDFLAGS="-L$boost_ldpath -Wl,-R$boost_ldpath"
- boost_cv_lib_program_options_LDPATH="$boost_ldpath"
- break 6
- else
- boost_failed_libs="$boost_failed_libs@$boost_lib@"
- fi
- done
- done
-done
-done
-done
-done
-rm -f conftest.$ac_objext
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $boost_cv_lib_program_options" >&5
-$as_echo "$boost_cv_lib_program_options" >&6; }
-case $boost_cv_lib_program_options in #(
- no) $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
- as_fn_error $? "cannot find the flags to link with Boost program_options" "$LINENO" 5
- ;;
-esac
-BOOST_PROGRAM_OPTIONS_LDFLAGS=$boost_cv_lib_program_options_LDFLAGS
-BOOST_PROGRAM_OPTIONS_LDPATH=$boost_cv_lib_program_options_LDPATH
-BOOST_LDPATH=$boost_cv_lib_program_options_LDPATH
-BOOST_PROGRAM_OPTIONS_LIBS=$boost_cv_lib_program_options_LIBS
-CPPFLAGS=$boost_save_CPPFLAGS
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-fi
-
-
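To make the six nested loops above more concrete, here is the set of candidate base names they generate for program_options for one illustrative combination (tag gcc44, version 1_46_1, -mt in effect, debug runtime option -d; none of these concrete values come from this configure run):

  boost_tag_=-gcc44; boost_ver_=-1_46_1; boost_mt_=-mt; boost_rtopt_=-d
  for boost_lib in \
    boost_program_options$boost_tag_$boost_mt_$boost_rtopt_$boost_ver_ \
    boost_program_options$boost_tag_$boost_rtopt_$boost_ver_ \
    boost_program_options$boost_tag_$boost_mt_$boost_ver_ \
    boost_program_options$boost_tag_$boost_ver_
  do
    echo "would try $boost_lib"
  done
  # would try boost_program_options-gcc44-mt-d-1_46_1
  # would try boost_program_options-gcc44-d-1_46_1
  # would try boost_program_options-gcc44-mt-1_46_1
  # would try boost_program_options-gcc44-1_46_1

Each such name is then looked up either as a static archive (lib<name>.$libext under each candidate library path) or via -l<name> for the shared variant, exactly as in the loop body above.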
-
-if test x"$boost_cv_inc_path" = xno; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: Boost not available, not searching for boost/algorithm/string.hpp" >&5
-$as_echo "$as_me: Boost not available, not searching for boost/algorithm/string.hpp" >&6;}
-else
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-ac_fn_cxx_check_header_mongrel "$LINENO" "boost/algorithm/string.hpp" "ac_cv_header_boost_algorithm_string_hpp" "$ac_includes_default"
-if test "x$ac_cv_header_boost_algorithm_string_hpp" = xyes; then :
-
-$as_echo "#define HAVE_BOOST_ALGORITHM_STRING_HPP 1" >>confdefs.h
-
-else
- as_fn_error $? "cannot find boost/algorithm/string.hpp" "$LINENO" 5
-fi
-
-
-CPPFLAGS=$boost_save_CPPFLAGS
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-fi
-
-
-
-if test x"$boost_cv_inc_path" = xno; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: Boost not available, not searching for boost/unordered_map.hpp" >&5
-$as_echo "$as_me: Boost not available, not searching for boost/unordered_map.hpp" >&6;}
-else
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-ac_fn_cxx_check_header_mongrel "$LINENO" "boost/unordered_map.hpp" "ac_cv_header_boost_unordered_map_hpp" "$ac_includes_default"
-if test "x$ac_cv_header_boost_unordered_map_hpp" = xyes; then :
-
-$as_echo "#define HAVE_BOOST_UNORDERED_MAP_HPP 1" >>confdefs.h
-
-else
- as_fn_error $? "cannot find boost/unordered_map.hpp" "$LINENO" 5
-fi
-
-
-CPPFLAGS=$boost_save_CPPFLAGS
-ac_ext=cpp
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-fi
-
-
-
-# Checks for header files.
-
-# Checks for typedefs, structures, and compiler characteristics.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdbool.h that conforms to C99" >&5
-$as_echo_n "checking for stdbool.h that conforms to C99... " >&6; }
-if ${ac_cv_header_stdbool_h+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-
-#include <stdbool.h>
-#ifndef bool
- "error: bool is not defined"
-#endif
-#ifndef false
- "error: false is not defined"
-#endif
-#if false
- "error: false is not 0"
-#endif
-#ifndef true
- "error: true is not defined"
-#endif
-#if true != 1
- "error: true is not 1"
-#endif
-#ifndef __bool_true_false_are_defined
- "error: __bool_true_false_are_defined is not defined"
-#endif
-
- struct s { _Bool s: 1; _Bool t; } s;
-
- char a[true == 1 ? 1 : -1];
- char b[false == 0 ? 1 : -1];
- char c[__bool_true_false_are_defined == 1 ? 1 : -1];
- char d[(bool) 0.5 == true ? 1 : -1];
- /* See body of main program for 'e'. */
- char f[(_Bool) 0.0 == false ? 1 : -1];
- char g[true];
- char h[sizeof (_Bool)];
- char i[sizeof s.t];
- enum { j = false, k = true, l = false * true, m = true * 256 };
- /* The following fails for
- HP aC++/ANSI C B3910B A.05.55 [Dec 04 2003]. */
- _Bool n[m];
- char o[sizeof n == m * sizeof n[0] ? 1 : -1];
- char p[-1 - (_Bool) 0 < 0 && -1 - (bool) 0 < 0 ? 1 : -1];
- /* Catch a bug in an HP-UX C compiler. See
- http://gcc.gnu.org/ml/gcc-patches/2003-12/msg02303.html
- http://lists.gnu.org/archive/html/bug-coreutils/2005-11/msg00161.html
- */
- _Bool q = true;
- _Bool *pq = &q;
-
-int
-main ()
-{
-
- bool e = &s;
- *pq |= q;
- *pq |= ! q;
- /* Refer to every declared value, to avoid compiler optimizations. */
- return (!a + !b + !c + !d + !e + !f + !g + !h + !i + !!j + !k + !!l
- + !m + !n + !o + !p + !q + !pq);
-
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_compile "$LINENO"; then :
- ac_cv_header_stdbool_h=yes
-else
- ac_cv_header_stdbool_h=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdbool_h" >&5
-$as_echo "$ac_cv_header_stdbool_h" >&6; }
-ac_fn_cxx_check_type "$LINENO" "_Bool" "ac_cv_type__Bool" "$ac_includes_default"
-if test "x$ac_cv_type__Bool" = xyes; then :
-
-cat >>confdefs.h <<_ACEOF
-#define HAVE__BOOL 1
-_ACEOF
-
-
-fi
-
-if test $ac_cv_header_stdbool_h = yes; then
-
-$as_echo "#define HAVE_STDBOOL_H 1" >>confdefs.h
-
-fi
-
-ac_fn_cxx_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default"
-if test "x$ac_cv_type_size_t" = xyes; then :
-
-else
-
-cat >>confdefs.h <<_ACEOF
-#define size_t unsigned int
-_ACEOF
-
-fi
-
-
-# Checks for library functions.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for error_at_line" >&5
-$as_echo_n "checking for error_at_line... " >&6; }
-if ${ac_cv_lib_error_at_line+:} false; then :
- $as_echo_n "(cached) " >&6
-else
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-#include <error.h>
-int
-main ()
-{
-error_at_line (0, 0, "", 0, "an error occurred");
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_cxx_try_link "$LINENO"; then :
- ac_cv_lib_error_at_line=yes
-else
- ac_cv_lib_error_at_line=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_error_at_line" >&5
-$as_echo "$ac_cv_lib_error_at_line" >&6; }
-if test $ac_cv_lib_error_at_line = no; then
- case " $LIBOBJS " in
- *" error.$ac_objext "* ) ;;
- *) LIBOBJS="$LIBOBJS error.$ac_objext"
- ;;
-esac
-
-fi
-
-
-ac_config_files="$ac_config_files Makefile tools/Makefile"
-
-cat >confcache <<\_ACEOF
-# This file is a shell script that caches the results of configure
-# tests run on this system so they can be shared between configure
-# scripts and configure runs, see configure's option --config-cache.
-# It is not useful on other systems. If it contains results you don't
-# want to keep, you may remove or edit it.
-#
-# config.status only pays attention to the cache file if you give it
-# the --recheck option to rerun configure.
-#
-# `ac_cv_env_foo' variables (set or unset) will be overridden when
-# loading this file, other *unset* `ac_cv_foo' will be assigned the
-# following values.
-
-_ACEOF
-
-# The following way of writing the cache mishandles newlines in values,
-# but we know of no workaround that is simple, portable, and efficient.
-# So, we kill variables containing newlines.
-# Ultrix sh set writes to stderr and can't be redirected directly,
-# and sets the high bit in the cache file unless we assign to the vars.
-(
- for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
- eval ac_val=\$$ac_var
- case $ac_val in #(
- *${as_nl}*)
- case $ac_var in #(
- *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
- esac
- case $ac_var in #(
- _ | IFS | as_nl) ;; #(
- BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
- *) { eval $ac_var=; unset $ac_var;} ;;
- esac ;;
- esac
- done
-
- (set) 2>&1 |
- case $as_nl`(ac_space=' '; set) 2>&1` in #(
- *${as_nl}ac_space=\ *)
- # `set' does not quote correctly, so add quotes: double-quote
- # substitution turns \\\\ into \\, and sed turns \\ into \.
- sed -n \
- "s/'/'\\\\''/g;
- s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
- ;; #(
- *)
- # `set' quotes correctly as required by POSIX, so do not add quotes.
- sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
- ;;
- esac |
- sort
-) |
- sed '
- /^ac_cv_env_/b end
- t clear
- :clear
- s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
- t end
- s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
- :end' >>confcache
-if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
- if test -w "$cache_file"; then
- if test "x$cache_file" != "x/dev/null"; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
-$as_echo "$as_me: updating cache $cache_file" >&6;}
- if test ! -f "$cache_file" || test -h "$cache_file"; then
- cat confcache >"$cache_file"
- else
- case $cache_file in #(
- */* | ?:*)
- mv -f confcache "$cache_file"$$ &&
- mv -f "$cache_file"$$ "$cache_file" ;; #(
- *)
- mv -f confcache "$cache_file" ;;
- esac
- fi
- fi
- else
- { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
-$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
- fi
-fi
-rm -f confcache
-
-test "x$prefix" = xNONE && prefix=$ac_default_prefix
-# Let make expand exec_prefix.
-test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
-
-DEFS=-DHAVE_CONFIG_H
-
-ac_libobjs=
-ac_ltlibobjs=
-U=
-for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
- # 1. Remove the extension, and $U if already installed.
- ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
- ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
- # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR
- # will be set to the directory where LIBOBJS objects are built.
- as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
- as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
-done
-LIBOBJS=$ac_libobjs
-
-LTLIBOBJS=$ac_ltlibobjs
-
-
- if test -n "$EXEEXT"; then
- am__EXEEXT_TRUE=
- am__EXEEXT_FALSE='#'
-else
- am__EXEEXT_TRUE='#'
- am__EXEEXT_FALSE=
-fi
-
-if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
- as_fn_error $? "conditional \"AMDEP\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then
- as_fn_error $? "conditional \"am__fastdepCXX\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
- as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then
- as_fn_error $? "conditional \"am__fastdepCXX\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-
-: "${CONFIG_STATUS=./config.status}"
-ac_write_fail=0
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files $CONFIG_STATUS"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
-$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
-as_write_fail=0
-cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
-#! $SHELL
-# Generated by $as_me.
-# Run this file to recreate the current configuration.
-# Compiler output produced by configure, useful for debugging
-# configure, is in config.log if it exists.
-
-debug=false
-ac_cs_recheck=false
-ac_cs_silent=false
-
-SHELL=\${CONFIG_SHELL-$SHELL}
-export SHELL
-_ASEOF
-cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
-## -------------------- ##
-## M4sh Initialization. ##
-## -------------------- ##
-
-# Be more Bourne compatible
-DUALCASE=1; export DUALCASE # for MKS sh
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
- emulate sh
- NULLCMD=:
- # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
- # is contrary to our usage. Disable this feature.
- alias -g '${1+"$@"}'='"$@"'
- setopt NO_GLOB_SUBST
-else
- case `(set -o) 2>/dev/null` in #(
- *posix*) :
- set -o posix ;; #(
- *) :
- ;;
-esac
-fi
-
-
-as_nl='
-'
-export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
- && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
- as_echo='print -r --'
- as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
- as_echo='printf %s\n'
- as_echo_n='printf %s'
-else
- if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
- as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
- as_echo_n='/usr/ucb/echo -n'
- else
- as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
- as_echo_n_body='eval
- arg=$1;
- case $arg in #(
- *"$as_nl"*)
- expr "X$arg" : "X\\(.*\\)$as_nl";
- arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
- esac;
- expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
- '
- export as_echo_n_body
- as_echo_n='sh -c $as_echo_n_body as_echo'
- fi
- export as_echo_body
- as_echo='sh -c $as_echo_body as_echo'
-fi
-
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
- PATH_SEPARATOR=:
- (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
- (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
- PATH_SEPARATOR=';'
- }
-fi
-
-
-# IFS
-# We need space, tab and new line, in precisely that order. Quoting is
-# there to prevent editors from complaining about space-tab.
-# (If _AS_PATH_WALK were called with IFS unset, it would disable word
-# splitting by setting IFS to empty value.)
-IFS=" "" $as_nl"
-
-# Find who we are. Look in the path if we contain no directory separator.
-as_myself=
-case $0 in #((
- *[\\/]* ) as_myself=$0 ;;
- *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
- done
-IFS=$as_save_IFS
-
- ;;
-esac
-# We did not find ourselves, most probably we were run as `sh COMMAND'
-# in which case we are not to be found in the path.
-if test "x$as_myself" = x; then
- as_myself=$0
-fi
-if test ! -f "$as_myself"; then
- $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
- exit 1
-fi
-
-# Unset variables that we do not need and which cause bugs (e.g. in
-# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
-# suppresses any "Segmentation fault" message there. '((' could
-# trigger a bug in pdksh 5.2.14.
-for as_var in BASH_ENV ENV MAIL MAILPATH
-do eval test x\${$as_var+set} = xset \
- && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# NLS nuisances.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# CDPATH.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
-# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
-# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
-as_fn_error ()
-{
- as_status=$1; test $as_status -eq 0 && as_status=1
- if test "$4"; then
- as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
- fi
- $as_echo "$as_me: error: $2" >&2
- as_fn_exit $as_status
-} # as_fn_error
-
-
-# as_fn_set_status STATUS
-# -----------------------
-# Set $? to STATUS, without forking.
-as_fn_set_status ()
-{
- return $1
-} # as_fn_set_status
-
-# as_fn_exit STATUS
-# -----------------
-# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
-as_fn_exit ()
-{
- set +e
- as_fn_set_status $1
- exit $1
-} # as_fn_exit
-
-# as_fn_unset VAR
-# ---------------
-# Portably unset VAR.
-as_fn_unset ()
-{
- { eval $1=; unset $1;}
-}
-as_unset=as_fn_unset
-# as_fn_append VAR VALUE
-# ----------------------
-# Append the text in VALUE to the end of the definition contained in VAR. Take
-# advantage of any shell optimizations that allow amortized linear growth over
-# repeated appends, instead of the typical quadratic growth present in naive
-# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
- eval 'as_fn_append ()
- {
- eval $1+=\$2
- }'
-else
- as_fn_append ()
- {
- eval $1=\$$1\$2
- }
-fi # as_fn_append
-
-# as_fn_arith ARG...
-# ------------------
-# Perform arithmetic evaluation on the ARGs, and store the result in the
-# global $as_val. Take advantage of shells that can avoid forks. The arguments
-# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
- eval 'as_fn_arith ()
- {
- as_val=$(( $* ))
- }'
-else
- as_fn_arith ()
- {
- as_val=`expr "$@" || test $? -eq 1`
- }
-fi # as_fn_arith
-
-
-if expr a : '\(a\)' >/dev/null 2>&1 &&
- test "X`expr 00001 : '.*\(...\)'`" = X001; then
- as_expr=expr
-else
- as_expr=false
-fi
-
-if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
- as_basename=basename
-else
- as_basename=false
-fi
-
-if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
- as_dirname=dirname
-else
- as_dirname=false
-fi
-
-as_me=`$as_basename -- "$0" ||
-$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
- X"$0" : 'X\(//\)$' \| \
- X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X/"$0" |
- sed '/^.*\/\([^/][^/]*\)\/*$/{
- s//\1/
- q
- }
- /^X\/\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\/\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
- case `echo 'xy\c'` in
- *c*) ECHO_T=' ';; # ECHO_T is single tab character.
- xy) ECHO_C='\c';;
- *) echo `echo ksh88 bug on AIX 6.1` > /dev/null
- ECHO_T=' ';;
- esac;;
-*)
- ECHO_N='-n';;
-esac
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
- rm -f conf$$.dir/conf$$.file
-else
- rm -f conf$$.dir
- mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
- if ln -s conf$$.file conf$$ 2>/dev/null; then
- as_ln_s='ln -s'
- # ... but there are two gotchas:
- # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
- # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
- # In both cases, we have to default to `cp -p'.
- ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
- as_ln_s='cp -p'
- elif ln conf$$.file conf$$ 2>/dev/null; then
- as_ln_s=ln
- else
- as_ln_s='cp -p'
- fi
-else
- as_ln_s='cp -p'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
-
-
-# as_fn_mkdir_p
-# -------------
-# Create "$as_dir" as a directory, including parents if necessary.
-as_fn_mkdir_p ()
-{
-
- case $as_dir in #(
- -*) as_dir=./$as_dir;;
- esac
- test -d "$as_dir" || eval $as_mkdir_p || {
- as_dirs=
- while :; do
- case $as_dir in #(
- *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
- *) as_qdir=$as_dir;;
- esac
- as_dirs="'$as_qdir' $as_dirs"
- as_dir=`$as_dirname -- "$as_dir" ||
-$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$as_dir" : 'X\(//\)[^/]' \| \
- X"$as_dir" : 'X\(//\)$' \| \
- X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_dir" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
- test -d "$as_dir" && break
- done
- test -z "$as_dirs" || eval "mkdir $as_dirs"
- } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
-
-
-} # as_fn_mkdir_p
-if mkdir -p . 2>/dev/null; then
- as_mkdir_p='mkdir -p "$as_dir"'
-else
- test -d ./-p && rmdir ./-p
- as_mkdir_p=false
-fi
-
-if test -x / >/dev/null 2>&1; then
- as_test_x='test -x'
-else
- if ls -dL / >/dev/null 2>&1; then
- as_ls_L_option=L
- else
- as_ls_L_option=
- fi
- as_test_x='
- eval sh -c '\''
- if test -d "$1"; then
- test -d "$1/.";
- else
- case $1 in #(
- -*)set "./$1";;
- esac;
- case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
- ???[sx]*):;;*)false;;esac;fi
- '\'' sh
- '
-fi
-as_executable_p=$as_test_x
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-
-exec 6>&1
-## ----------------------------------- ##
-## Main body of $CONFIG_STATUS script. ##
-## ----------------------------------- ##
-_ASEOF
-test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# Save the log message, to keep $0 and so on meaningful, and to
-# report actual input values of CONFIG_FILES etc. instead of their
-# values after options handling.
-ac_log="
-This file was extended by moses-compact-rule-table $as_me 1.0, which was
-generated by GNU Autoconf 2.68. Invocation command line was
-
- CONFIG_FILES = $CONFIG_FILES
- CONFIG_HEADERS = $CONFIG_HEADERS
- CONFIG_LINKS = $CONFIG_LINKS
- CONFIG_COMMANDS = $CONFIG_COMMANDS
- $ $0 $@
-
-on `(hostname || uname -n) 2>/dev/null | sed 1q`
-"
-
-_ACEOF
-
-case $ac_config_files in *"
-"*) set x $ac_config_files; shift; ac_config_files=$*;;
-esac
-
-case $ac_config_headers in *"
-"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
-esac
-
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-# Files that config.status was made for.
-config_files="$ac_config_files"
-config_headers="$ac_config_headers"
-config_commands="$ac_config_commands"
-
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-ac_cs_usage="\
-\`$as_me' instantiates files and other configuration actions
-from templates according to the current configuration. Unless the files
-and actions are specified as TAGs, all are instantiated by default.
-
-Usage: $0 [OPTION]... [TAG]...
-
- -h, --help print this help, then exit
- -V, --version print version number and configuration settings, then exit
- --config print configuration, then exit
- -q, --quiet, --silent
- do not print progress messages
- -d, --debug don't remove temporary files
- --recheck update $as_me by reconfiguring in the same conditions
- --file=FILE[:TEMPLATE]
- instantiate the configuration file FILE
- --header=FILE[:TEMPLATE]
- instantiate the configuration header FILE
-
-Configuration files:
-$config_files
-
-Configuration headers:
-$config_headers
-
-Configuration commands:
-$config_commands
-
-Report bugs to <moses-support@mit.edu>."
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
-ac_cs_version="\\
-moses-compact-rule-table config.status 1.0
-configured by $0, generated by GNU Autoconf 2.68,
- with options \\"\$ac_cs_config\\"
-
-Copyright (C) 2010 Free Software Foundation, Inc.
-This config.status script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it."
-
-ac_pwd='$ac_pwd'
-srcdir='$srcdir'
-INSTALL='$INSTALL'
-MKDIR_P='$MKDIR_P'
-AWK='$AWK'
-test -n "\$AWK" || AWK=awk
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# The default lists apply if the user does not specify any file.
-ac_need_defaults=:
-while test $# != 0
-do
- case $1 in
- --*=?*)
- ac_option=`expr "X$1" : 'X\([^=]*\)='`
- ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
- ac_shift=:
- ;;
- --*=)
- ac_option=`expr "X$1" : 'X\([^=]*\)='`
- ac_optarg=
- ac_shift=:
- ;;
- *)
- ac_option=$1
- ac_optarg=$2
- ac_shift=shift
- ;;
- esac
-
- case $ac_option in
- # Handling of the options.
- -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
- ac_cs_recheck=: ;;
- --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
- $as_echo "$ac_cs_version"; exit ;;
- --config | --confi | --conf | --con | --co | --c )
- $as_echo "$ac_cs_config"; exit ;;
- --debug | --debu | --deb | --de | --d | -d )
- debug=: ;;
- --file | --fil | --fi | --f )
- $ac_shift
- case $ac_optarg in
- *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
- '') as_fn_error $? "missing file argument" ;;
- esac
- as_fn_append CONFIG_FILES " '$ac_optarg'"
- ac_need_defaults=false;;
- --header | --heade | --head | --hea )
- $ac_shift
- case $ac_optarg in
- *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
- esac
- as_fn_append CONFIG_HEADERS " '$ac_optarg'"
- ac_need_defaults=false;;
- --he | --h)
- # Conflict between --help and --header
- as_fn_error $? "ambiguous option: \`$1'
-Try \`$0 --help' for more information.";;
- --help | --hel | -h )
- $as_echo "$ac_cs_usage"; exit ;;
- -q | -quiet | --quiet | --quie | --qui | --qu | --q \
- | -silent | --silent | --silen | --sile | --sil | --si | --s)
- ac_cs_silent=: ;;
-
- # This is an error.
- -*) as_fn_error $? "unrecognized option: \`$1'
-Try \`$0 --help' for more information." ;;
-
- *) as_fn_append ac_config_targets " $1"
- ac_need_defaults=false ;;
-
- esac
- shift
-done
-
-ac_configure_extra_args=
-
-if $ac_cs_silent; then
- exec 6>/dev/null
- ac_configure_extra_args="$ac_configure_extra_args --silent"
-fi
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-if \$ac_cs_recheck; then
- set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
- shift
- \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
- CONFIG_SHELL='$SHELL'
- export CONFIG_SHELL
- exec "\$@"
-fi
-
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-exec 5>>config.log
-{
- echo
- sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
-## Running $as_me. ##
-_ASBOX
- $as_echo "$ac_log"
-} >&5
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-#
-# INIT-COMMANDS
-#
-AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
-
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-sed_quote_subst='$sed_quote_subst'
-double_quote_subst='$double_quote_subst'
-delay_variable_subst='$delay_variable_subst'
-macro_version='`$ECHO "X$macro_version" | $Xsed -e "$delay_single_quote_subst"`'
-macro_revision='`$ECHO "X$macro_revision" | $Xsed -e "$delay_single_quote_subst"`'
-enable_shared='`$ECHO "X$enable_shared" | $Xsed -e "$delay_single_quote_subst"`'
-enable_static='`$ECHO "X$enable_static" | $Xsed -e "$delay_single_quote_subst"`'
-pic_mode='`$ECHO "X$pic_mode" | $Xsed -e "$delay_single_quote_subst"`'
-enable_fast_install='`$ECHO "X$enable_fast_install" | $Xsed -e "$delay_single_quote_subst"`'
-host_alias='`$ECHO "X$host_alias" | $Xsed -e "$delay_single_quote_subst"`'
-host='`$ECHO "X$host" | $Xsed -e "$delay_single_quote_subst"`'
-host_os='`$ECHO "X$host_os" | $Xsed -e "$delay_single_quote_subst"`'
-build_alias='`$ECHO "X$build_alias" | $Xsed -e "$delay_single_quote_subst"`'
-build='`$ECHO "X$build" | $Xsed -e "$delay_single_quote_subst"`'
-build_os='`$ECHO "X$build_os" | $Xsed -e "$delay_single_quote_subst"`'
-SED='`$ECHO "X$SED" | $Xsed -e "$delay_single_quote_subst"`'
-Xsed='`$ECHO "X$Xsed" | $Xsed -e "$delay_single_quote_subst"`'
-GREP='`$ECHO "X$GREP" | $Xsed -e "$delay_single_quote_subst"`'
-EGREP='`$ECHO "X$EGREP" | $Xsed -e "$delay_single_quote_subst"`'
-FGREP='`$ECHO "X$FGREP" | $Xsed -e "$delay_single_quote_subst"`'
-LD='`$ECHO "X$LD" | $Xsed -e "$delay_single_quote_subst"`'
-NM='`$ECHO "X$NM" | $Xsed -e "$delay_single_quote_subst"`'
-LN_S='`$ECHO "X$LN_S" | $Xsed -e "$delay_single_quote_subst"`'
-max_cmd_len='`$ECHO "X$max_cmd_len" | $Xsed -e "$delay_single_quote_subst"`'
-ac_objext='`$ECHO "X$ac_objext" | $Xsed -e "$delay_single_quote_subst"`'
-exeext='`$ECHO "X$exeext" | $Xsed -e "$delay_single_quote_subst"`'
-lt_unset='`$ECHO "X$lt_unset" | $Xsed -e "$delay_single_quote_subst"`'
-lt_SP2NL='`$ECHO "X$lt_SP2NL" | $Xsed -e "$delay_single_quote_subst"`'
-lt_NL2SP='`$ECHO "X$lt_NL2SP" | $Xsed -e "$delay_single_quote_subst"`'
-reload_flag='`$ECHO "X$reload_flag" | $Xsed -e "$delay_single_quote_subst"`'
-reload_cmds='`$ECHO "X$reload_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-OBJDUMP='`$ECHO "X$OBJDUMP" | $Xsed -e "$delay_single_quote_subst"`'
-deplibs_check_method='`$ECHO "X$deplibs_check_method" | $Xsed -e "$delay_single_quote_subst"`'
-file_magic_cmd='`$ECHO "X$file_magic_cmd" | $Xsed -e "$delay_single_quote_subst"`'
-AR='`$ECHO "X$AR" | $Xsed -e "$delay_single_quote_subst"`'
-AR_FLAGS='`$ECHO "X$AR_FLAGS" | $Xsed -e "$delay_single_quote_subst"`'
-STRIP='`$ECHO "X$STRIP" | $Xsed -e "$delay_single_quote_subst"`'
-RANLIB='`$ECHO "X$RANLIB" | $Xsed -e "$delay_single_quote_subst"`'
-old_postinstall_cmds='`$ECHO "X$old_postinstall_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-old_postuninstall_cmds='`$ECHO "X$old_postuninstall_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-old_archive_cmds='`$ECHO "X$old_archive_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-CC='`$ECHO "X$CC" | $Xsed -e "$delay_single_quote_subst"`'
-CFLAGS='`$ECHO "X$CFLAGS" | $Xsed -e "$delay_single_quote_subst"`'
-compiler='`$ECHO "X$compiler" | $Xsed -e "$delay_single_quote_subst"`'
-GCC='`$ECHO "X$GCC" | $Xsed -e "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_pipe='`$ECHO "X$lt_cv_sys_global_symbol_pipe" | $Xsed -e "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_cdecl='`$ECHO "X$lt_cv_sys_global_symbol_to_cdecl" | $Xsed -e "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "X$lt_cv_sys_global_symbol_to_c_name_address" | $Xsed -e "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "X$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $Xsed -e "$delay_single_quote_subst"`'
-objdir='`$ECHO "X$objdir" | $Xsed -e "$delay_single_quote_subst"`'
-SHELL='`$ECHO "X$SHELL" | $Xsed -e "$delay_single_quote_subst"`'
-ECHO='`$ECHO "X$ECHO" | $Xsed -e "$delay_single_quote_subst"`'
-MAGIC_CMD='`$ECHO "X$MAGIC_CMD" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_no_builtin_flag='`$ECHO "X$lt_prog_compiler_no_builtin_flag" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_wl='`$ECHO "X$lt_prog_compiler_wl" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_pic='`$ECHO "X$lt_prog_compiler_pic" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_static='`$ECHO "X$lt_prog_compiler_static" | $Xsed -e "$delay_single_quote_subst"`'
-lt_cv_prog_compiler_c_o='`$ECHO "X$lt_cv_prog_compiler_c_o" | $Xsed -e "$delay_single_quote_subst"`'
-need_locks='`$ECHO "X$need_locks" | $Xsed -e "$delay_single_quote_subst"`'
-DSYMUTIL='`$ECHO "X$DSYMUTIL" | $Xsed -e "$delay_single_quote_subst"`'
-NMEDIT='`$ECHO "X$NMEDIT" | $Xsed -e "$delay_single_quote_subst"`'
-LIPO='`$ECHO "X$LIPO" | $Xsed -e "$delay_single_quote_subst"`'
-OTOOL='`$ECHO "X$OTOOL" | $Xsed -e "$delay_single_quote_subst"`'
-OTOOL64='`$ECHO "X$OTOOL64" | $Xsed -e "$delay_single_quote_subst"`'
-libext='`$ECHO "X$libext" | $Xsed -e "$delay_single_quote_subst"`'
-shrext_cmds='`$ECHO "X$shrext_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-extract_expsyms_cmds='`$ECHO "X$extract_expsyms_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-archive_cmds_need_lc='`$ECHO "X$archive_cmds_need_lc" | $Xsed -e "$delay_single_quote_subst"`'
-enable_shared_with_static_runtimes='`$ECHO "X$enable_shared_with_static_runtimes" | $Xsed -e "$delay_single_quote_subst"`'
-export_dynamic_flag_spec='`$ECHO "X$export_dynamic_flag_spec" | $Xsed -e "$delay_single_quote_subst"`'
-whole_archive_flag_spec='`$ECHO "X$whole_archive_flag_spec" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_needs_object='`$ECHO "X$compiler_needs_object" | $Xsed -e "$delay_single_quote_subst"`'
-old_archive_from_new_cmds='`$ECHO "X$old_archive_from_new_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-old_archive_from_expsyms_cmds='`$ECHO "X$old_archive_from_expsyms_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-archive_cmds='`$ECHO "X$archive_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-archive_expsym_cmds='`$ECHO "X$archive_expsym_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-module_cmds='`$ECHO "X$module_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-module_expsym_cmds='`$ECHO "X$module_expsym_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-with_gnu_ld='`$ECHO "X$with_gnu_ld" | $Xsed -e "$delay_single_quote_subst"`'
-allow_undefined_flag='`$ECHO "X$allow_undefined_flag" | $Xsed -e "$delay_single_quote_subst"`'
-no_undefined_flag='`$ECHO "X$no_undefined_flag" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec='`$ECHO "X$hardcode_libdir_flag_spec" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec_ld='`$ECHO "X$hardcode_libdir_flag_spec_ld" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_libdir_separator='`$ECHO "X$hardcode_libdir_separator" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_direct='`$ECHO "X$hardcode_direct" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_direct_absolute='`$ECHO "X$hardcode_direct_absolute" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_minus_L='`$ECHO "X$hardcode_minus_L" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_shlibpath_var='`$ECHO "X$hardcode_shlibpath_var" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_automatic='`$ECHO "X$hardcode_automatic" | $Xsed -e "$delay_single_quote_subst"`'
-inherit_rpath='`$ECHO "X$inherit_rpath" | $Xsed -e "$delay_single_quote_subst"`'
-link_all_deplibs='`$ECHO "X$link_all_deplibs" | $Xsed -e "$delay_single_quote_subst"`'
-fix_srcfile_path='`$ECHO "X$fix_srcfile_path" | $Xsed -e "$delay_single_quote_subst"`'
-always_export_symbols='`$ECHO "X$always_export_symbols" | $Xsed -e "$delay_single_quote_subst"`'
-export_symbols_cmds='`$ECHO "X$export_symbols_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-exclude_expsyms='`$ECHO "X$exclude_expsyms" | $Xsed -e "$delay_single_quote_subst"`'
-include_expsyms='`$ECHO "X$include_expsyms" | $Xsed -e "$delay_single_quote_subst"`'
-prelink_cmds='`$ECHO "X$prelink_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-file_list_spec='`$ECHO "X$file_list_spec" | $Xsed -e "$delay_single_quote_subst"`'
-variables_saved_for_relink='`$ECHO "X$variables_saved_for_relink" | $Xsed -e "$delay_single_quote_subst"`'
-need_lib_prefix='`$ECHO "X$need_lib_prefix" | $Xsed -e "$delay_single_quote_subst"`'
-need_version='`$ECHO "X$need_version" | $Xsed -e "$delay_single_quote_subst"`'
-version_type='`$ECHO "X$version_type" | $Xsed -e "$delay_single_quote_subst"`'
-runpath_var='`$ECHO "X$runpath_var" | $Xsed -e "$delay_single_quote_subst"`'
-shlibpath_var='`$ECHO "X$shlibpath_var" | $Xsed -e "$delay_single_quote_subst"`'
-shlibpath_overrides_runpath='`$ECHO "X$shlibpath_overrides_runpath" | $Xsed -e "$delay_single_quote_subst"`'
-libname_spec='`$ECHO "X$libname_spec" | $Xsed -e "$delay_single_quote_subst"`'
-library_names_spec='`$ECHO "X$library_names_spec" | $Xsed -e "$delay_single_quote_subst"`'
-soname_spec='`$ECHO "X$soname_spec" | $Xsed -e "$delay_single_quote_subst"`'
-postinstall_cmds='`$ECHO "X$postinstall_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-postuninstall_cmds='`$ECHO "X$postuninstall_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-finish_cmds='`$ECHO "X$finish_cmds" | $Xsed -e "$delay_single_quote_subst"`'
-finish_eval='`$ECHO "X$finish_eval" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_into_libs='`$ECHO "X$hardcode_into_libs" | $Xsed -e "$delay_single_quote_subst"`'
-sys_lib_search_path_spec='`$ECHO "X$sys_lib_search_path_spec" | $Xsed -e "$delay_single_quote_subst"`'
-sys_lib_dlsearch_path_spec='`$ECHO "X$sys_lib_dlsearch_path_spec" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_action='`$ECHO "X$hardcode_action" | $Xsed -e "$delay_single_quote_subst"`'
-enable_dlopen='`$ECHO "X$enable_dlopen" | $Xsed -e "$delay_single_quote_subst"`'
-enable_dlopen_self='`$ECHO "X$enable_dlopen_self" | $Xsed -e "$delay_single_quote_subst"`'
-enable_dlopen_self_static='`$ECHO "X$enable_dlopen_self_static" | $Xsed -e "$delay_single_quote_subst"`'
-old_striplib='`$ECHO "X$old_striplib" | $Xsed -e "$delay_single_quote_subst"`'
-striplib='`$ECHO "X$striplib" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_lib_search_dirs='`$ECHO "X$compiler_lib_search_dirs" | $Xsed -e "$delay_single_quote_subst"`'
-predep_objects='`$ECHO "X$predep_objects" | $Xsed -e "$delay_single_quote_subst"`'
-postdep_objects='`$ECHO "X$postdep_objects" | $Xsed -e "$delay_single_quote_subst"`'
-predeps='`$ECHO "X$predeps" | $Xsed -e "$delay_single_quote_subst"`'
-postdeps='`$ECHO "X$postdeps" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_lib_search_path='`$ECHO "X$compiler_lib_search_path" | $Xsed -e "$delay_single_quote_subst"`'
-LD_CXX='`$ECHO "X$LD_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-old_archive_cmds_CXX='`$ECHO "X$old_archive_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_CXX='`$ECHO "X$compiler_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-GCC_CXX='`$ECHO "X$GCC_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "X$lt_prog_compiler_no_builtin_flag_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_wl_CXX='`$ECHO "X$lt_prog_compiler_wl_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_pic_CXX='`$ECHO "X$lt_prog_compiler_pic_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-lt_prog_compiler_static_CXX='`$ECHO "X$lt_prog_compiler_static_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-lt_cv_prog_compiler_c_o_CXX='`$ECHO "X$lt_cv_prog_compiler_c_o_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-archive_cmds_need_lc_CXX='`$ECHO "X$archive_cmds_need_lc_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-enable_shared_with_static_runtimes_CXX='`$ECHO "X$enable_shared_with_static_runtimes_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-export_dynamic_flag_spec_CXX='`$ECHO "X$export_dynamic_flag_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-whole_archive_flag_spec_CXX='`$ECHO "X$whole_archive_flag_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_needs_object_CXX='`$ECHO "X$compiler_needs_object_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-old_archive_from_new_cmds_CXX='`$ECHO "X$old_archive_from_new_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-old_archive_from_expsyms_cmds_CXX='`$ECHO "X$old_archive_from_expsyms_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-archive_cmds_CXX='`$ECHO "X$archive_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-archive_expsym_cmds_CXX='`$ECHO "X$archive_expsym_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-module_cmds_CXX='`$ECHO "X$module_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-module_expsym_cmds_CXX='`$ECHO "X$module_expsym_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-with_gnu_ld_CXX='`$ECHO "X$with_gnu_ld_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-allow_undefined_flag_CXX='`$ECHO "X$allow_undefined_flag_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-no_undefined_flag_CXX='`$ECHO "X$no_undefined_flag_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec_CXX='`$ECHO "X$hardcode_libdir_flag_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec_ld_CXX='`$ECHO "X$hardcode_libdir_flag_spec_ld_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_libdir_separator_CXX='`$ECHO "X$hardcode_libdir_separator_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_direct_CXX='`$ECHO "X$hardcode_direct_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_direct_absolute_CXX='`$ECHO "X$hardcode_direct_absolute_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_minus_L_CXX='`$ECHO "X$hardcode_minus_L_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_shlibpath_var_CXX='`$ECHO "X$hardcode_shlibpath_var_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_automatic_CXX='`$ECHO "X$hardcode_automatic_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-inherit_rpath_CXX='`$ECHO "X$inherit_rpath_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-link_all_deplibs_CXX='`$ECHO "X$link_all_deplibs_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-fix_srcfile_path_CXX='`$ECHO "X$fix_srcfile_path_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-always_export_symbols_CXX='`$ECHO "X$always_export_symbols_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-export_symbols_cmds_CXX='`$ECHO "X$export_symbols_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-exclude_expsyms_CXX='`$ECHO "X$exclude_expsyms_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-include_expsyms_CXX='`$ECHO "X$include_expsyms_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-prelink_cmds_CXX='`$ECHO "X$prelink_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-file_list_spec_CXX='`$ECHO "X$file_list_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-hardcode_action_CXX='`$ECHO "X$hardcode_action_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_lib_search_dirs_CXX='`$ECHO "X$compiler_lib_search_dirs_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-predep_objects_CXX='`$ECHO "X$predep_objects_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-postdep_objects_CXX='`$ECHO "X$postdep_objects_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-predeps_CXX='`$ECHO "X$predeps_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-postdeps_CXX='`$ECHO "X$postdeps_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-compiler_lib_search_path_CXX='`$ECHO "X$compiler_lib_search_path_CXX" | $Xsed -e "$delay_single_quote_subst"`'
-
-LTCC='$LTCC'
-LTCFLAGS='$LTCFLAGS'
-compiler='$compiler_DEFAULT'
-
-# Quote evaled strings.
-for var in SED \
-GREP \
-EGREP \
-FGREP \
-LD \
-NM \
-LN_S \
-lt_SP2NL \
-lt_NL2SP \
-reload_flag \
-OBJDUMP \
-deplibs_check_method \
-file_magic_cmd \
-AR \
-AR_FLAGS \
-STRIP \
-RANLIB \
-CC \
-CFLAGS \
-compiler \
-lt_cv_sys_global_symbol_pipe \
-lt_cv_sys_global_symbol_to_cdecl \
-lt_cv_sys_global_symbol_to_c_name_address \
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-SHELL \
-ECHO \
-lt_prog_compiler_no_builtin_flag \
-lt_prog_compiler_wl \
-lt_prog_compiler_pic \
-lt_prog_compiler_static \
-lt_cv_prog_compiler_c_o \
-need_locks \
-DSYMUTIL \
-NMEDIT \
-LIPO \
-OTOOL \
-OTOOL64 \
-shrext_cmds \
-export_dynamic_flag_spec \
-whole_archive_flag_spec \
-compiler_needs_object \
-with_gnu_ld \
-allow_undefined_flag \
-no_undefined_flag \
-hardcode_libdir_flag_spec \
-hardcode_libdir_flag_spec_ld \
-hardcode_libdir_separator \
-fix_srcfile_path \
-exclude_expsyms \
-include_expsyms \
-file_list_spec \
-variables_saved_for_relink \
-libname_spec \
-library_names_spec \
-soname_spec \
-finish_eval \
-old_striplib \
-striplib \
-compiler_lib_search_dirs \
-predep_objects \
-postdep_objects \
-predeps \
-postdeps \
-compiler_lib_search_path \
-LD_CXX \
-compiler_CXX \
-lt_prog_compiler_no_builtin_flag_CXX \
-lt_prog_compiler_wl_CXX \
-lt_prog_compiler_pic_CXX \
-lt_prog_compiler_static_CXX \
-lt_cv_prog_compiler_c_o_CXX \
-export_dynamic_flag_spec_CXX \
-whole_archive_flag_spec_CXX \
-compiler_needs_object_CXX \
-with_gnu_ld_CXX \
-allow_undefined_flag_CXX \
-no_undefined_flag_CXX \
-hardcode_libdir_flag_spec_CXX \
-hardcode_libdir_flag_spec_ld_CXX \
-hardcode_libdir_separator_CXX \
-fix_srcfile_path_CXX \
-exclude_expsyms_CXX \
-include_expsyms_CXX \
-file_list_spec_CXX \
-compiler_lib_search_dirs_CXX \
-predep_objects_CXX \
-postdep_objects_CXX \
-predeps_CXX \
-postdeps_CXX \
-compiler_lib_search_path_CXX; do
- case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in
- *[\\\\\\\`\\"\\\$]*)
- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
- ;;
- *)
- eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
- ;;
- esac
-done
-
-# Double-quote double-evaled strings.
-for var in reload_cmds \
-old_postinstall_cmds \
-old_postuninstall_cmds \
-old_archive_cmds \
-extract_expsyms_cmds \
-old_archive_from_new_cmds \
-old_archive_from_expsyms_cmds \
-archive_cmds \
-archive_expsym_cmds \
-module_cmds \
-module_expsym_cmds \
-export_symbols_cmds \
-prelink_cmds \
-postinstall_cmds \
-postuninstall_cmds \
-finish_cmds \
-sys_lib_search_path_spec \
-sys_lib_dlsearch_path_spec \
-old_archive_cmds_CXX \
-old_archive_from_new_cmds_CXX \
-old_archive_from_expsyms_cmds_CXX \
-archive_cmds_CXX \
-archive_expsym_cmds_CXX \
-module_cmds_CXX \
-module_expsym_cmds_CXX \
-export_symbols_cmds_CXX \
-prelink_cmds_CXX; do
- case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in
- *[\\\\\\\`\\"\\\$]*)
- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
- ;;
- *)
- eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
- ;;
- esac
-done
-
-# Fix-up fallback echo if it was mangled by the above quoting rules.
-case \$lt_ECHO in
-*'\\\$0 --fallback-echo"') lt_ECHO=\`\$ECHO "X\$lt_ECHO" | \$Xsed -e 's/\\\\\\\\\\\\\\\$0 --fallback-echo"\$/\$0 --fallback-echo"/'\`
- ;;
-esac
-
-ac_aux_dir='$ac_aux_dir'
-xsi_shell='$xsi_shell'
-lt_shell_append='$lt_shell_append'
-
-# See if we are running on zsh, and set the options which allow our
-# commands through without removal of \ escapes INIT.
-if test -n "\${ZSH_VERSION+set}" ; then
- setopt NO_GLOB_SUBST
-fi
-
-
- PACKAGE='$PACKAGE'
- VERSION='$VERSION'
- TIMESTAMP='$TIMESTAMP'
- RM='$RM'
- ofile='$ofile'
-
-
-
-
-
-
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-
-# Handling of arguments.
-for ac_config_target in $ac_config_targets
-do
- case $ac_config_target in
- "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;;
- "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
- "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
- "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
- "tools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/Makefile" ;;
-
- *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
- esac
-done
-
-
-# If the user did not use the arguments to specify the items to instantiate,
-# then the envvar interface is used. Set only those that are not.
-# We use the long form for the default assignment because of an extremely
-# bizarre bug on SunOS 4.1.3.
-if $ac_need_defaults; then
- test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
- test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
- test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
-fi
-
-# Have a temporary directory for convenience. Make it in the build tree
-# simply because there is no reason against having it here, and in addition,
-# creating and moving files from /tmp can sometimes cause problems.
-# Hook for its removal unless debugging.
-# Note that there is a small window in which the directory will not be cleaned:
-# after its creation but before its name has been assigned to `$tmp'.
-$debug ||
-{
- tmp= ac_tmp=
- trap 'exit_status=$?
- : "${ac_tmp:=$tmp}"
- { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
-' 0
- trap 'as_fn_exit 1' 1 2 13 15
-}
-# Create a (secure) tmp directory for tmp files.
-
-{
- tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
- test -d "$tmp"
-} ||
-{
- tmp=./conf$$-$RANDOM
- (umask 077 && mkdir "$tmp")
-} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
-ac_tmp=$tmp
-
-# Set up the scripts for CONFIG_FILES section.
-# No need to generate them if there are no CONFIG_FILES.
-# This happens for instance with `./config.status config.h'.
-if test -n "$CONFIG_FILES"; then
-
-
-ac_cr=`echo X | tr X '\015'`
-# On cygwin, bash can eat \r inside `` if the user requested igncr.
-# But we know of no other shell where ac_cr would be empty at this
-# point, so we can use a bashism as a fallback.
-if test "x$ac_cr" = x; then
- eval ac_cr=\$\'\\r\'
-fi
-ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
-if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
- ac_cs_awk_cr='\\r'
-else
- ac_cs_awk_cr=$ac_cr
-fi
-
-echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
-_ACEOF
-
-
-{
- echo "cat >conf$$subs.awk <<_ACEOF" &&
- echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
- echo "_ACEOF"
-} >conf$$subs.sh ||
- as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
-ac_delim='%!_!# '
-for ac_last_try in false false false false false :; do
- . ./conf$$subs.sh ||
- as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-
- ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
- if test $ac_delim_n = $ac_delim_num; then
- break
- elif $ac_last_try; then
- as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
- else
- ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
- fi
-done
-rm -f conf$$subs.sh
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
-_ACEOF
-sed -n '
-h
-s/^/S["/; s/!.*/"]=/
-p
-g
-s/^[^!]*!//
-:repl
-t repl
-s/'"$ac_delim"'$//
-t delim
-:nl
-h
-s/\(.\{148\}\)..*/\1/
-t more1
-s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
-p
-n
-b repl
-:more1
-s/["\\]/\\&/g; s/^/"/; s/$/"\\/
-p
-g
-s/.\{148\}//
-t nl
-:delim
-h
-s/\(.\{148\}\)..*/\1/
-t more2
-s/["\\]/\\&/g; s/^/"/; s/$/"/
-p
-b
-:more2
-s/["\\]/\\&/g; s/^/"/; s/$/"\\/
-p
-g
-s/.\{148\}//
-t delim
-' <conf$$subs.awk | sed '
-/^[^""]/{
- N
- s/\n//
-}
-' >>$CONFIG_STATUS || ac_write_fail=1
-rm -f conf$$subs.awk
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-_ACAWK
-cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
- for (key in S) S_is_set[key] = 1
- FS = ""
-
-}
-{
- line = $ 0
- nfields = split(line, field, "@")
- substed = 0
- len = length(field[1])
- for (i = 2; i < nfields; i++) {
- key = field[i]
- keylen = length(key)
- if (S_is_set[key]) {
- value = S[key]
- line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
- len += length(value) + length(field[++i])
- substed = 1
- } else
- len += 1 + keylen
- }
-
- print line
-}
-
-_ACAWK
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
- sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
-else
- cat
-fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
- || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
-_ACEOF
-
-# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
-# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
-# trailing colons and then remove the whole line if VPATH becomes empty
-# (actually we leave an empty line to preserve line numbers).
-if test "x$srcdir" = x.; then
- ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{
-h
-s///
-s/^/:/
-s/[ ]*$/:/
-s/:\$(srcdir):/:/g
-s/:\${srcdir}:/:/g
-s/:@srcdir@:/:/g
-s/^:*//
-s/:*$//
-x
-s/\(=[ ]*\).*/\1/
-G
-s/\n//
-s/^[^=]*=[ ]*$//
-}'
-fi
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-fi # test -n "$CONFIG_FILES"
-
-# Set up the scripts for CONFIG_HEADERS section.
-# No need to generate them if there are no CONFIG_HEADERS.
-# This happens for instance with `./config.status Makefile'.
-if test -n "$CONFIG_HEADERS"; then
-cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
-BEGIN {
-_ACEOF
-
-# Transform confdefs.h into an awk script `defines.awk', embedded as
-# here-document in config.status, that substitutes the proper values into
-# config.h.in to produce config.h.
-
-# Create a delimiter string that does not exist in confdefs.h, to ease
-# handling of long lines.
-ac_delim='%!_!# '
-for ac_last_try in false false :; do
- ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
- if test -z "$ac_tt"; then
- break
- elif $ac_last_try; then
- as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
- else
- ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
- fi
-done
-
-# For the awk script, D is an array of macro values keyed by name,
-# likewise P contains macro parameters if any. Preserve backslash
-# newline sequences.
-
-ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
-sed -n '
-s/.\{148\}/&'"$ac_delim"'/g
-t rset
-:rset
-s/^[ ]*#[ ]*define[ ][ ]*/ /
-t def
-d
-:def
-s/\\$//
-t bsnl
-s/["\\]/\\&/g
-s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\
-D["\1"]=" \3"/p
-s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p
-d
-:bsnl
-s/["\\]/\\&/g
-s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\
-D["\1"]=" \3\\\\\\n"\\/p
-t cont
-s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
-t cont
-d
-:cont
-n
-s/.\{148\}/&'"$ac_delim"'/g
-t clear
-:clear
-s/\\$//
-t bsnlc
-s/["\\]/\\&/g; s/^/"/; s/$/"/p
-d
-:bsnlc
-s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
-b cont
-' <confdefs.h | sed '
-s/'"$ac_delim"'/"\\\
-"/g' >>$CONFIG_STATUS || ac_write_fail=1
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
- for (key in D) D_is_set[key] = 1
- FS = ""
-}
-/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
- line = \$ 0
- split(line, arg, " ")
- if (arg[1] == "#") {
- defundef = arg[2]
- mac1 = arg[3]
- } else {
- defundef = substr(arg[1], 2)
- mac1 = arg[2]
- }
- split(mac1, mac2, "(") #)
- macro = mac2[1]
- prefix = substr(line, 1, index(line, defundef) - 1)
- if (D_is_set[macro]) {
- # Preserve the white space surrounding the "#".
- print prefix "define", macro P[macro] D[macro]
- next
- } else {
- # Replace #undef with comments. This is necessary, for example,
- # in the case of _POSIX_SOURCE, which is predefined and required
- # on some systems where configure will not decide to define it.
- if (defundef == "undef") {
- print "/*", prefix defundef, macro, "*/"
- next
- }
- }
-}
-{ print }
-_ACAWK
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
- as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
-fi # test -n "$CONFIG_HEADERS"
-
-
-eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS"
-shift
-for ac_tag
-do
- case $ac_tag in
- :[FHLC]) ac_mode=$ac_tag; continue;;
- esac
- case $ac_mode$ac_tag in
- :[FHL]*:*);;
- :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
- :[FH]-) ac_tag=-:-;;
- :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
- esac
- ac_save_IFS=$IFS
- IFS=:
- set x $ac_tag
- IFS=$ac_save_IFS
- shift
- ac_file=$1
- shift
-
- case $ac_mode in
- :L) ac_source=$1;;
- :[FH])
- ac_file_inputs=
- for ac_f
- do
- case $ac_f in
- -) ac_f="$ac_tmp/stdin";;
- *) # Look for the file first in the build tree, then in the source tree
- # (if the path is not absolute). The absolute path cannot be DOS-style,
- # because $ac_f cannot contain `:'.
- test -f "$ac_f" ||
- case $ac_f in
- [\\/$]*) false;;
- *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
- esac ||
- as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
- esac
- case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
- as_fn_append ac_file_inputs " '$ac_f'"
- done
-
- # Let's still pretend it is `configure' which instantiates (i.e., don't
- # use $as_me), people would be surprised to read:
- # /* config.h. Generated by config.status. */
- configure_input='Generated from '`
- $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
- `' by configure.'
- if test x"$ac_file" != x-; then
- configure_input="$ac_file. $configure_input"
- { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
-$as_echo "$as_me: creating $ac_file" >&6;}
- fi
- # Neutralize special characters interpreted by sed in replacement strings.
- case $configure_input in #(
- *\&* | *\|* | *\\* )
- ac_sed_conf_input=`$as_echo "$configure_input" |
- sed 's/[\\\\&|]/\\\\&/g'`;; #(
- *) ac_sed_conf_input=$configure_input;;
- esac
-
- case $ac_tag in
- *:-:* | *:-) cat >"$ac_tmp/stdin" \
- || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
- esac
- ;;
- esac
-
- ac_dir=`$as_dirname -- "$ac_file" ||
-$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$ac_file" : 'X\(//\)[^/]' \| \
- X"$ac_file" : 'X\(//\)$' \| \
- X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$ac_file" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
- as_dir="$ac_dir"; as_fn_mkdir_p
- ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
- ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
- # A ".." for each directory in $ac_dir_suffix.
- ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
- case $ac_top_builddir_sub in
- "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
- *) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
- esac ;;
-esac
-ac_abs_top_builddir=$ac_pwd
-ac_abs_builddir=$ac_pwd$ac_dir_suffix
-# for backward compatibility:
-ac_top_builddir=$ac_top_build_prefix
-
-case $srcdir in
- .) # We are building in place.
- ac_srcdir=.
- ac_top_srcdir=$ac_top_builddir_sub
- ac_abs_top_srcdir=$ac_pwd ;;
- [\\/]* | ?:[\\/]* ) # Absolute name.
- ac_srcdir=$srcdir$ac_dir_suffix;
- ac_top_srcdir=$srcdir
- ac_abs_top_srcdir=$srcdir ;;
- *) # Relative name.
- ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
- ac_top_srcdir=$ac_top_build_prefix$srcdir
- ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
-esac
-ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
-
-
- case $ac_mode in
- :F)
- #
- # CONFIG_FILE
- #
-
- case $INSTALL in
- [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
- *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
- esac
- ac_MKDIR_P=$MKDIR_P
- case $MKDIR_P in
- [\\/$]* | ?:[\\/]* ) ;;
- */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
- esac
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# If the template does not know about datarootdir, expand it.
-# FIXME: This hack should be removed a few years after 2.60.
-ac_datarootdir_hack=; ac_datarootdir_seen=
-ac_sed_dataroot='
-/datarootdir/ {
- p
- q
-}
-/@datadir@/p
-/@docdir@/p
-/@infodir@/p
-/@localedir@/p
-/@mandir@/p'
-case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
-*datarootdir*) ac_datarootdir_seen=yes;;
-*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
-$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
- ac_datarootdir_hack='
- s&@datadir@&$datadir&g
- s&@docdir@&$docdir&g
- s&@infodir@&$infodir&g
- s&@localedir@&$localedir&g
- s&@mandir@&$mandir&g
- s&\\\${datarootdir}&$datarootdir&g' ;;
-esac
-_ACEOF
-
-# Neutralize VPATH when `$srcdir' = `.'.
-# Shell code in configure.ac might set extrasub.
-# FIXME: do we really want to maintain this feature?
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_sed_extra="$ac_vpsub
-$extrasub
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-:t
-/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
-s|@configure_input@|$ac_sed_conf_input|;t t
-s&@top_builddir@&$ac_top_builddir_sub&;t t
-s&@top_build_prefix@&$ac_top_build_prefix&;t t
-s&@srcdir@&$ac_srcdir&;t t
-s&@abs_srcdir@&$ac_abs_srcdir&;t t
-s&@top_srcdir@&$ac_top_srcdir&;t t
-s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
-s&@builddir@&$ac_builddir&;t t
-s&@abs_builddir@&$ac_abs_builddir&;t t
-s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
-s&@INSTALL@&$ac_INSTALL&;t t
-s&@MKDIR_P@&$ac_MKDIR_P&;t t
-$ac_datarootdir_hack
-"
-eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
- >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
-
-test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
- { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
- { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \
- "$ac_tmp/out"`; test -z "$ac_out"; } &&
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined. Please make sure it is defined" >&5
-$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined. Please make sure it is defined" >&2;}
-
- rm -f "$ac_tmp/stdin"
- case $ac_file in
- -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
- *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
- esac \
- || as_fn_error $? "could not create $ac_file" "$LINENO" 5
- ;;
- :H)
- #
- # CONFIG_HEADER
- #
- if test x"$ac_file" != x-; then
- {
- $as_echo "/* $configure_input */" \
- && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
- } >"$ac_tmp/config.h" \
- || as_fn_error $? "could not create $ac_file" "$LINENO" 5
- if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
-$as_echo "$as_me: $ac_file is unchanged" >&6;}
- else
- rm -f "$ac_file"
- mv "$ac_tmp/config.h" "$ac_file" \
- || as_fn_error $? "could not create $ac_file" "$LINENO" 5
- fi
- else
- $as_echo "/* $configure_input */" \
- && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
- || as_fn_error $? "could not create -" "$LINENO" 5
- fi
-# Compute "$ac_file"'s index in $config_headers.
-_am_arg="$ac_file"
-_am_stamp_count=1
-for _am_header in $config_headers :; do
- case $_am_header in
- $_am_arg | $_am_arg:* )
- break ;;
- * )
- _am_stamp_count=`expr $_am_stamp_count + 1` ;;
- esac
-done
-echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" ||
-$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$_am_arg" : 'X\(//\)[^/]' \| \
- X"$_am_arg" : 'X\(//\)$' \| \
- X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$_am_arg" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`/stamp-h$_am_stamp_count
- ;;
-
- :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
-$as_echo "$as_me: executing $ac_file commands" >&6;}
- ;;
- esac
-
-
- case $ac_file$ac_mode in
- "depfiles":C) test x"$AMDEP_TRUE" != x"" || {
- # Autoconf 2.62 quotes --file arguments for eval, but not when files
- # are listed without --file. Let's play safe and only enable the eval
- # if we detect the quoting.
- case $CONFIG_FILES in
- *\'*) eval set x "$CONFIG_FILES" ;;
- *) set x $CONFIG_FILES ;;
- esac
- shift
- for mf
- do
- # Strip MF so we end up with the name of the file.
- mf=`echo "$mf" | sed -e 's/:.*$//'`
- # Check whether this is an Automake generated Makefile or not.
- # We used to match only the files named `Makefile.in', but
- # some people rename them; so instead we look at the file content.
- # Grep'ing the first line is not enough: some people post-process
- # each Makefile.in and add a new line on top of each file to say so.
- # Grep'ing the whole file is not good either: AIX grep has a line
- # limit of 2048, but all sed's we know understand at least 4000.
- if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
- dirpart=`$as_dirname -- "$mf" ||
-$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$mf" : 'X\(//\)[^/]' \| \
- X"$mf" : 'X\(//\)$' \| \
- X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$mf" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
- else
- continue
- fi
- # Extract the definition of DEPDIR, am__include, and am__quote
- # from the Makefile without running `make'.
- DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
- test -z "$DEPDIR" && continue
- am__include=`sed -n 's/^am__include = //p' < "$mf"`
- test -z "am__include" && continue
- am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
- # When using ansi2knr, U may be empty or an underscore; expand it
- U=`sed -n 's/^U = //p' < "$mf"`
- # Find all dependency output files, they are included files with
- # $(DEPDIR) in their names. We invoke sed twice because it is the
- # simplest approach to changing $(DEPDIR) to its actual value in the
- # expansion.
- for file in `sed -n "
- s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
- sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
- # Make sure the directory exists.
- test -f "$dirpart/$file" && continue
- fdir=`$as_dirname -- "$file" ||
-$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$file" : 'X\(//\)[^/]' \| \
- X"$file" : 'X\(//\)$' \| \
- X"$file" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$file" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'`
- as_dir=$dirpart/$fdir; as_fn_mkdir_p
- # echo "creating $dirpart/$file"
- echo '# dummy' > "$dirpart/$file"
- done
- done
-}
- ;;
- "libtool":C)
-
- # See if we are running on zsh, and set the options which allow our
- # commands through without removal of \ escapes.
- if test -n "${ZSH_VERSION+set}" ; then
- setopt NO_GLOB_SUBST
- fi
-
- cfgfile="${ofile}T"
- trap "$RM \"$cfgfile\"; exit 1" 1 2 15
- $RM "$cfgfile"
-
- cat <<_LT_EOF >> "$cfgfile"
-#! $SHELL
-
-# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
-# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
-# NOTE: Changes made to this file will be lost: look at ltmain.sh.
-#
-# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
-# 2006, 2007, 2008 Free Software Foundation, Inc.
-# Written by Gordon Matzigkeit, 1996
-#
-# This file is part of GNU Libtool.
-#
-# GNU Libtool is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as
-# published by the Free Software Foundation; either version 2 of
-# the License, or (at your option) any later version.
-#
-# As a special exception to the GNU General Public License,
-# if you distribute this file as part of a program or library that
-# is built using GNU Libtool, you may include this file under the
-# same distribution terms that you use for the rest of that program.
-#
-# GNU Libtool is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Libtool; see the file COPYING. If not, a copy
-# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
-# obtained by writing to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
-
-# The names of the tagged configurations supported by this script.
-available_tags="CXX "
-
-# ### BEGIN LIBTOOL CONFIG
-
-# Which release of libtool.m4 was used?
-macro_version=$macro_version
-macro_revision=$macro_revision
-
-# Whether or not to build shared libraries.
-build_libtool_libs=$enable_shared
-
-# Whether or not to build static libraries.
-build_old_libs=$enable_static
-
-# What type of objects to build.
-pic_mode=$pic_mode
-
-# Whether or not to optimize for fast installation.
-fast_install=$enable_fast_install
-
-# The host system.
-host_alias=$host_alias
-host=$host
-host_os=$host_os
-
-# The build system.
-build_alias=$build_alias
-build=$build
-build_os=$build_os
-
-# A sed program that does not truncate output.
-SED=$lt_SED
-
-# Sed that helps us avoid accidentally triggering echo(1) options like -n.
-Xsed="\$SED -e 1s/^X//"
-
-# A grep program that handles long lines.
-GREP=$lt_GREP
-
-# An ERE matcher.
-EGREP=$lt_EGREP
-
-# A literal string matcher.
-FGREP=$lt_FGREP
-
-# A BSD- or MS-compatible name lister.
-NM=$lt_NM
-
-# Whether we need soft or hard links.
-LN_S=$lt_LN_S
-
-# What is the maximum length of a command?
-max_cmd_len=$max_cmd_len
-
-# Object file suffix (normally "o").
-objext=$ac_objext
-
-# Executable file suffix (normally "").
-exeext=$exeext
-
-# whether the shell understands "unset".
-lt_unset=$lt_unset
-
-# turn spaces into newlines.
-SP2NL=$lt_lt_SP2NL
-
-# turn newlines into spaces.
-NL2SP=$lt_lt_NL2SP
-
-# How to create reloadable object files.
-reload_flag=$lt_reload_flag
-reload_cmds=$lt_reload_cmds
-
-# An object symbol dumper.
-OBJDUMP=$lt_OBJDUMP
-
-# Method to check whether dependent libraries are shared objects.
-deplibs_check_method=$lt_deplibs_check_method
-
-# Command to use when deplibs_check_method == "file_magic".
-file_magic_cmd=$lt_file_magic_cmd
-
-# The archiver.
-AR=$lt_AR
-AR_FLAGS=$lt_AR_FLAGS
-
-# A symbol stripping program.
-STRIP=$lt_STRIP
-
-# Commands used to install an old-style archive.
-RANLIB=$lt_RANLIB
-old_postinstall_cmds=$lt_old_postinstall_cmds
-old_postuninstall_cmds=$lt_old_postuninstall_cmds
-
-# A C compiler.
-LTCC=$lt_CC
-
-# LTCC compiler flags.
-LTCFLAGS=$lt_CFLAGS
-
-# Take the output of nm and produce a listing of raw symbols and C names.
-global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
-
-# Transform the output of nm in a proper C declaration.
-global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
-
-# Transform the output of nm in a C name address pair.
-global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
-
-# Transform the output of nm in a C name address pair when lib prefix is needed.
-global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-# The name of the directory that contains temporary libtool files.
-objdir=$objdir
-
-# Shell to use when invoking shell scripts.
-SHELL=$lt_SHELL
-
-# An echo program that does not interpret backslashes.
-ECHO=$lt_ECHO
-
-# Used to examine libraries when file_magic_cmd begins with "file".
-MAGIC_CMD=$MAGIC_CMD
-
-# Must we lock files when doing compilation?
-need_locks=$lt_need_locks
-
-# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
-DSYMUTIL=$lt_DSYMUTIL
-
-# Tool to change global to local symbols on Mac OS X.
-NMEDIT=$lt_NMEDIT
-
-# Tool to manipulate fat objects and archives on Mac OS X.
-LIPO=$lt_LIPO
-
-# ldd/readelf like tool for Mach-O binaries on Mac OS X.
-OTOOL=$lt_OTOOL
-
-# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
-OTOOL64=$lt_OTOOL64
-
-# Old archive suffix (normally "a").
-libext=$libext
-
-# Shared library suffix (normally ".so").
-shrext_cmds=$lt_shrext_cmds
-
-# The commands to extract the exported symbol list from a shared archive.
-extract_expsyms_cmds=$lt_extract_expsyms_cmds
-
-# Variables whose values should be saved in libtool wrapper scripts and
-# restored at link time.
-variables_saved_for_relink=$lt_variables_saved_for_relink
-
-# Do we need the "lib" prefix for modules?
-need_lib_prefix=$need_lib_prefix
-
-# Do we need a version for libraries?
-need_version=$need_version
-
-# Library versioning type.
-version_type=$version_type
-
-# Shared library runtime path variable.
-runpath_var=$runpath_var
-
-# Shared library path variable.
-shlibpath_var=$shlibpath_var
-
-# Is shlibpath searched before the hard-coded library search path?
-shlibpath_overrides_runpath=$shlibpath_overrides_runpath
-
-# Format of library name prefix.
-libname_spec=$lt_libname_spec
-
-# List of archive names. First name is the real one, the rest are links.
-# The last name is the one that the linker finds with -lNAME
-library_names_spec=$lt_library_names_spec
-
-# The coded name of the library, if different from the real name.
-soname_spec=$lt_soname_spec
-
-# Command to use after installation of a shared archive.
-postinstall_cmds=$lt_postinstall_cmds
-
-# Command to use after uninstallation of a shared archive.
-postuninstall_cmds=$lt_postuninstall_cmds
-
-# Commands used to finish a libtool library installation in a directory.
-finish_cmds=$lt_finish_cmds
-
-# As "finish_cmds", except a single script fragment to be evaled but
-# not shown.
-finish_eval=$lt_finish_eval
-
-# Whether we should hardcode library paths into libraries.
-hardcode_into_libs=$hardcode_into_libs
-
-# Compile-time system search path for libraries.
-sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
-
-# Run-time system search path for libraries.
-sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec
-
-# Whether dlopen is supported.
-dlopen_support=$enable_dlopen
-
-# Whether dlopen of programs is supported.
-dlopen_self=$enable_dlopen_self
-
-# Whether dlopen of statically linked programs is supported.
-dlopen_self_static=$enable_dlopen_self_static
-
-# Commands to strip libraries.
-old_striplib=$lt_old_striplib
-striplib=$lt_striplib
-
-
-# The linker used to build libraries.
-LD=$lt_LD
-
-# Commands used to build an old-style archive.
-old_archive_cmds=$lt_old_archive_cmds
-
-# A language specific compiler.
-CC=$lt_compiler
-
-# Is the compiler the GNU compiler?
-with_gcc=$GCC
-
-# Compiler flag to turn off builtin functions.
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
-# How to pass a linker flag through the compiler.
-wl=$lt_lt_prog_compiler_wl
-
-# Additional compiler flags for building library objects.
-pic_flag=$lt_lt_prog_compiler_pic
-
-# Compiler flag to prevent dynamic linking.
-link_static_flag=$lt_lt_prog_compiler_static
-
-# Does compiler simultaneously support -c and -o options?
-compiler_c_o=$lt_lt_cv_prog_compiler_c_o
-
-# Whether or not to add -lc for building shared libraries.
-build_libtool_need_lc=$archive_cmds_need_lc
-
-# Whether or not to disallow shared libs when runtime libs are static.
-allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
-
-# Compiler flag to allow reflexive dlopens.
-export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
-
-# Compiler flag to generate shared objects directly from archives.
-whole_archive_flag_spec=$lt_whole_archive_flag_spec
-
-# Whether the compiler copes with passing no objects directly.
-compiler_needs_object=$lt_compiler_needs_object
-
-# Create an old-style archive from a shared archive.
-old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
-
-# Create a temporary old-style archive to link instead of a shared archive.
-old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
-
-# Commands used to build a shared archive.
-archive_cmds=$lt_archive_cmds
-archive_expsym_cmds=$lt_archive_expsym_cmds
-
-# Commands used to build a loadable module if different from building
-# a shared archive.
-module_cmds=$lt_module_cmds
-module_expsym_cmds=$lt_module_expsym_cmds
-
-# Whether we are building with GNU ld or not.
-with_gnu_ld=$lt_with_gnu_ld
-
-# Flag that allows shared libraries with undefined symbols to be built.
-allow_undefined_flag=$lt_allow_undefined_flag
-
-# Flag that enforces no undefined symbols.
-no_undefined_flag=$lt_no_undefined_flag
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist
-hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
-
-# If ld is used when linking, flag to hardcode \$libdir into a binary
-# during linking. This must work even if \$libdir does not exist.
-hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld
-
-# Whether we need a single "-rpath" flag with a separated argument.
-hardcode_libdir_separator=$lt_hardcode_libdir_separator
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary.
-hardcode_direct=$hardcode_direct
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary and the resulting library dependency is
-# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
-# library is relocated.
-hardcode_direct_absolute=$hardcode_direct_absolute
-
-# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
-# into the resulting binary.
-hardcode_minus_L=$hardcode_minus_L
-
-# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
-# into the resulting binary.
-hardcode_shlibpath_var=$hardcode_shlibpath_var
-
-# Set to "yes" if building a shared library automatically hardcodes DIR
-# into the library and all subsequent libraries and executables linked
-# against it.
-hardcode_automatic=$hardcode_automatic
-
-# Set to yes if linker adds runtime paths of dependent libraries
-# to runtime path list.
-inherit_rpath=$inherit_rpath
-
-# Whether libtool must link a program against all its dependency libraries.
-link_all_deplibs=$link_all_deplibs
-
-# Fix the shell variable \$srcfile for the compiler.
-fix_srcfile_path=$lt_fix_srcfile_path
-
-# Set to "yes" if exported symbols are required.
-always_export_symbols=$always_export_symbols
-
-# The commands to list exported symbols.
-export_symbols_cmds=$lt_export_symbols_cmds
-
-# Symbols that should not be listed in the preloaded symbols.
-exclude_expsyms=$lt_exclude_expsyms
-
-# Symbols that must always be exported.
-include_expsyms=$lt_include_expsyms
-
-# Commands necessary for linking programs (against libraries) with templates.
-prelink_cmds=$lt_prelink_cmds
-
-# Specify filename containing input files.
-file_list_spec=$lt_file_list_spec
-
-# How to hardcode a shared library path into an executable.
-hardcode_action=$hardcode_action
-
-# The directories searched by this compiler when creating a shared library.
-compiler_lib_search_dirs=$lt_compiler_lib_search_dirs
-
-# Dependencies to place before and after the objects being linked to
-# create a shared library.
-predep_objects=$lt_predep_objects
-postdep_objects=$lt_postdep_objects
-predeps=$lt_predeps
-postdeps=$lt_postdeps
-
-# The library search path used internally by the compiler when linking
-# a shared library.
-compiler_lib_search_path=$lt_compiler_lib_search_path
-
-# ### END LIBTOOL CONFIG
-
-_LT_EOF
-
- case $host_os in
- aix3*)
- cat <<\_LT_EOF >> "$cfgfile"
-# AIX sometimes has problems with the GCC collect2 program. For some
-# reason, if we set the COLLECT_NAMES environment variable, the problems
-# vanish in a puff of smoke.
-if test "X${COLLECT_NAMES+set}" != Xset; then
- COLLECT_NAMES=
- export COLLECT_NAMES
-fi
-_LT_EOF
- ;;
- esac
-
-
-ltmain="$ac_aux_dir/ltmain.sh"
-
-
- # We use sed instead of cat because bash on DJGPP gets confused if
- # it finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
- || (rm -f "$cfgfile"; exit 1)
-
- case $xsi_shell in
- yes)
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_dirname file append nondir_replacement
-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
-# otherwise set result to NONDIR_REPLACEMENT.
-func_dirname ()
-{
- case ${1} in
- */*) func_dirname_result="${1%/*}${2}" ;;
- * ) func_dirname_result="${3}" ;;
- esac
-}
-
-# func_basename file
-func_basename ()
-{
- func_basename_result="${1##*/}"
-}
-
-# func_dirname_and_basename file append nondir_replacement
-# perform func_basename and func_dirname in a single function
-# call:
-# dirname: Compute the dirname of FILE. If nonempty,
-# add APPEND to the result, otherwise set result
-# to NONDIR_REPLACEMENT.
-# value returned in "$func_dirname_result"
-# basename: Compute filename of FILE.
-# value returned in "$func_basename_result"
-# Implementation must be kept synchronized with func_dirname
-# and func_basename. For efficiency, we do not delegate to
-# those functions but instead duplicate the functionality here.
-func_dirname_and_basename ()
-{
- case ${1} in
- */*) func_dirname_result="${1%/*}${2}" ;;
- * ) func_dirname_result="${3}" ;;
- esac
- func_basename_result="${1##*/}"
-}
-
-# func_stripname prefix suffix name
-# strip PREFIX and SUFFIX off of NAME.
-# PREFIX and SUFFIX must not contain globbing or regex special
-# characters, hashes, percent signs, but SUFFIX may contain a leading
-# dot (in which case that matches only a dot).
-func_stripname ()
-{
- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
- # positional parameters, so assign one to ordinary parameter first.
- func_stripname_result=${3}
- func_stripname_result=${func_stripname_result#"${1}"}
- func_stripname_result=${func_stripname_result%"${2}"}
-}
-
-# func_opt_split
-func_opt_split ()
-{
- func_opt_split_opt=${1%%=*}
- func_opt_split_arg=${1#*=}
-}
-
-# func_lo2o object
-func_lo2o ()
-{
- case ${1} in
- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
- *) func_lo2o_result=${1} ;;
- esac
-}
-
-# func_xform libobj-or-source
-func_xform ()
-{
- func_xform_result=${1%.*}.lo
-}
-
-# func_arith arithmetic-term...
-func_arith ()
-{
- func_arith_result=$(( $* ))
-}
-
-# func_len string
-# STRING may not start with a hyphen.
-func_len ()
-{
- func_len_result=${#1}
-}
-
-_LT_EOF
- ;;
- *) # Bourne compatible functions.
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_dirname file append nondir_replacement
-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
-# otherwise set result to NONDIR_REPLACEMENT.
-func_dirname ()
-{
- # Extract subdirectory from the argument.
- func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"`
- if test "X$func_dirname_result" = "X${1}"; then
- func_dirname_result="${3}"
- else
- func_dirname_result="$func_dirname_result${2}"
- fi
-}
-
-# func_basename file
-func_basename ()
-{
- func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"`
-}
-
-
-# func_stripname prefix suffix name
-# strip PREFIX and SUFFIX off of NAME.
-# PREFIX and SUFFIX must not contain globbing or regex special
-# characters, hashes, percent signs, but SUFFIX may contain a leading
-# dot (in which case that matches only a dot).
-# func_strip_suffix prefix name
-func_stripname ()
-{
- case ${2} in
- .*) func_stripname_result=`$ECHO "X${3}" \
- | $Xsed -e "s%^${1}%%" -e "s%\\\\${2}\$%%"`;;
- *) func_stripname_result=`$ECHO "X${3}" \
- | $Xsed -e "s%^${1}%%" -e "s%${2}\$%%"`;;
- esac
-}
-
-# sed scripts:
-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
-my_sed_long_arg='1s/^-[^=]*=//'
-
-# func_opt_split
-func_opt_split ()
-{
- func_opt_split_opt=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_opt"`
- func_opt_split_arg=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_arg"`
-}
-
-# func_lo2o object
-func_lo2o ()
-{
- func_lo2o_result=`$ECHO "X${1}" | $Xsed -e "$lo2o"`
-}
-
-# func_xform libobj-or-source
-func_xform ()
-{
- func_xform_result=`$ECHO "X${1}" | $Xsed -e 's/\.[^.]*$/.lo/'`
-}
-
-# func_arith arithmetic-term...
-func_arith ()
-{
- func_arith_result=`expr "$@"`
-}
-
-# func_len string
-# STRING may not start with a hyphen.
-func_len ()
-{
- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
-}
-
-_LT_EOF
-esac
-
-case $lt_shell_append in
- yes)
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_append var value
-# Append VALUE to the end of shell variable VAR.
-func_append ()
-{
- eval "$1+=\$2"
-}
-_LT_EOF
- ;;
- *)
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_append var value
-# Append VALUE to the end of shell variable VAR.
-func_append ()
-{
- eval "$1=\$$1\$2"
-}
-
-_LT_EOF
- ;;
- esac
-
-
- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
- || (rm -f "$cfgfile"; exit 1)
-
- mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-
- cat <<_LT_EOF >> "$ofile"
-
-# ### BEGIN LIBTOOL TAG CONFIG: CXX
-
-# The linker used to build libraries.
-LD=$lt_LD_CXX
-
-# Commands used to build an old-style archive.
-old_archive_cmds=$lt_old_archive_cmds_CXX
-
-# A language specific compiler.
-CC=$lt_compiler_CXX
-
-# Is the compiler the GNU compiler?
-with_gcc=$GCC_CXX
-
-# Compiler flag to turn off builtin functions.
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX
-
-# How to pass a linker flag through the compiler.
-wl=$lt_lt_prog_compiler_wl_CXX
-
-# Additional compiler flags for building library objects.
-pic_flag=$lt_lt_prog_compiler_pic_CXX
-
-# Compiler flag to prevent dynamic linking.
-link_static_flag=$lt_lt_prog_compiler_static_CXX
-
-# Does compiler simultaneously support -c and -o options?
-compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX
-
-# Whether or not to add -lc for building shared libraries.
-build_libtool_need_lc=$archive_cmds_need_lc_CXX
-
-# Whether or not to disallow shared libs when runtime libs are static.
-allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX
-
-# Compiler flag to allow reflexive dlopens.
-export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX
-
-# Compiler flag to generate shared objects directly from archives.
-whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX
-
-# Whether the compiler copes with passing no objects directly.
-compiler_needs_object=$lt_compiler_needs_object_CXX
-
-# Create an old-style archive from a shared archive.
-old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX
-
-# Create a temporary old-style archive to link instead of a shared archive.
-old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX
-
-# Commands used to build a shared archive.
-archive_cmds=$lt_archive_cmds_CXX
-archive_expsym_cmds=$lt_archive_expsym_cmds_CXX
-
-# Commands used to build a loadable module if different from building
-# a shared archive.
-module_cmds=$lt_module_cmds_CXX
-module_expsym_cmds=$lt_module_expsym_cmds_CXX
-
-# Whether we are building with GNU ld or not.
-with_gnu_ld=$lt_with_gnu_ld_CXX
-
-# Flag that allows shared libraries with undefined symbols to be built.
-allow_undefined_flag=$lt_allow_undefined_flag_CXX
-
-# Flag that enforces no undefined symbols.
-no_undefined_flag=$lt_no_undefined_flag_CXX
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist
-hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX
-
-# If ld is used when linking, flag to hardcode \$libdir into a binary
-# during linking. This must work even if \$libdir does not exist.
-hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld_CXX
-
-# Whether we need a single "-rpath" flag with a separated argument.
-hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary.
-hardcode_direct=$hardcode_direct_CXX
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary and the resulting library dependency is
-# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
-# library is relocated.
-hardcode_direct_absolute=$hardcode_direct_absolute_CXX
-
-# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
-# into the resulting binary.
-hardcode_minus_L=$hardcode_minus_L_CXX
-
-# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
-# into the resulting binary.
-hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX
-
-# Set to "yes" if building a shared library automatically hardcodes DIR
-# into the library and all subsequent libraries and executables linked
-# against it.
-hardcode_automatic=$hardcode_automatic_CXX
-
-# Set to yes if linker adds runtime paths of dependent libraries
-# to runtime path list.
-inherit_rpath=$inherit_rpath_CXX
-
-# Whether libtool must link a program against all its dependency libraries.
-link_all_deplibs=$link_all_deplibs_CXX
-
-# Fix the shell variable \$srcfile for the compiler.
-fix_srcfile_path=$lt_fix_srcfile_path_CXX
-
-# Set to "yes" if exported symbols are required.
-always_export_symbols=$always_export_symbols_CXX
-
-# The commands to list exported symbols.
-export_symbols_cmds=$lt_export_symbols_cmds_CXX
-
-# Symbols that should not be listed in the preloaded symbols.
-exclude_expsyms=$lt_exclude_expsyms_CXX
-
-# Symbols that must always be exported.
-include_expsyms=$lt_include_expsyms_CXX
-
-# Commands necessary for linking programs (against libraries) with templates.
-prelink_cmds=$lt_prelink_cmds_CXX
-
-# Specify filename containing input files.
-file_list_spec=$lt_file_list_spec_CXX
-
-# How to hardcode a shared library path into an executable.
-hardcode_action=$hardcode_action_CXX
-
-# The directories searched by this compiler when creating a shared library.
-compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_CXX
-
-# Dependencies to place before and after the objects being linked to
-# create a shared library.
-predep_objects=$lt_predep_objects_CXX
-postdep_objects=$lt_postdep_objects_CXX
-predeps=$lt_predeps_CXX
-postdeps=$lt_postdeps_CXX
-
-# The library search path used internally by the compiler when linking
-# a shared library.
-compiler_lib_search_path=$lt_compiler_lib_search_path_CXX
-
-# ### END LIBTOOL TAG CONFIG: CXX
-_LT_EOF
-
- ;;
-
- esac
-done # for ac_tag
-
-
-as_fn_exit 0
-_ACEOF
-ac_clean_files=$ac_clean_files_save
-
-test $ac_write_fail = 0 ||
- as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
-
-
-# configure is writing to config.log, and then calls config.status.
-# config.status does its own redirection, appending to config.log.
-# Unfortunately, on DOS this fails, as config.log is still kept open
-# by configure, so config.status won't be able to write to it; its
-# output is simply discarded. So we exec the FD to /dev/null,
-# effectively closing config.log, so it can be properly (re)opened and
-# appended to by config.status. When coming back to configure, we
-# need to make the FD available again.
-if test "$no_create" != yes; then
- ac_cs_success=:
- ac_config_status_args=
- test "$silent" = yes &&
- ac_config_status_args="$ac_config_status_args --quiet"
- exec 5>/dev/null
- $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
- exec 5>>config.log
- # Use ||, not &&, to avoid exiting from the if with $? = 1, which
- # would make configure fail if this is the last instruction.
- $ac_cs_success || as_fn_exit 1
-fi
-if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
-$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
-fi
-
diff --git a/scripts/training/compact-rule-table/configure.ac b/scripts/training/compact-rule-table/configure.ac
deleted file mode 100644
index 14302cfe6..000000000
--- a/scripts/training/compact-rule-table/configure.ac
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- Autoconf -*-
-# Process this file with autoconf to produce a configure script.
-
-AC_PREREQ([2.65])
-AC_INIT([moses-compact-rule-table], [1.0], [moses-support@mit.edu])
-AM_INIT_AUTOMAKE([foreign])
-AC_CONFIG_SRCDIR([tools/Compactify.cpp])
-AC_CONFIG_HEADERS([config.h])
-AC_CONFIG_MACRO_DIR([m4])
-AC_LANG([C++])
-
-# Checks for programs.
-AC_PROG_CXX
-AC_PROG_INSTALL
-AC_PROG_LIBTOOL
-
-# Checks for libraries.
-BOOST_REQUIRE
-BOOST_PROGRAM_OPTIONS
-BOOST_STRING_ALGO
-BOOST_UNORDERED
-
-# Checks for header files.
-
-# Checks for typedefs, structures, and compiler characteristics.
-AC_HEADER_STDBOOL
-AC_TYPE_SIZE_T
-
-# Checks for library functions.
-AC_FUNC_ERROR_AT_LINE
-
-AC_CONFIG_FILES([Makefile tools/Makefile])
-AC_OUTPUT
diff --git a/scripts/training/compact-rule-table/depcomp b/scripts/training/compact-rule-table/depcomp
deleted file mode 100755
index df8eea7e4..000000000
--- a/scripts/training/compact-rule-table/depcomp
+++ /dev/null
@@ -1,630 +0,0 @@
-#! /bin/sh
-# depcomp - compile a program generating dependencies as side-effects
-
-scriptversion=2009-04-28.21; # UTC
-
-# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2009 Free
-# Software Foundation, Inc.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2, or (at your option)
-# any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
-
-case $1 in
- '')
- echo "$0: No command. Try \`$0 --help' for more information." 1>&2
- exit 1;
- ;;
- -h | --h*)
- cat <<\EOF
-Usage: depcomp [--help] [--version] PROGRAM [ARGS]
-
-Run PROGRAM ARGS to compile a file, generating dependencies
-as side-effects.
-
-Environment variables:
- depmode Dependency tracking mode.
- source Source file read by `PROGRAM ARGS'.
- object Object file output by `PROGRAM ARGS'.
- DEPDIR directory where to store dependencies.
- depfile Dependency file to output.
- tmpdepfile Temporary file to use when outputting dependencies.
- libtool Whether libtool is used (yes/no).
-
-Report bugs to <bug-automake@gnu.org>.
-EOF
- exit $?
- ;;
- -v | --v*)
- echo "depcomp $scriptversion"
- exit $?
- ;;
-esac
-
-if test -z "$depmode" || test -z "$source" || test -z "$object"; then
- echo "depcomp: Variables source, object and depmode must be set" 1>&2
- exit 1
-fi
-
-# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
-depfile=${depfile-`echo "$object" |
- sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
-tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
-
-rm -f "$tmpdepfile"
-
-# Some modes work just like other modes, but use different flags. We
-# parameterize here, but still list the modes in the big case below,
-# to make depend.m4 easier to write. Note that we *cannot* use a case
-# here, because this file can only contain one case statement.
-if test "$depmode" = hp; then
- # HP compiler uses -M and no extra arg.
- gccflag=-M
- depmode=gcc
-fi
-
-if test "$depmode" = dashXmstdout; then
- # This is just like dashmstdout with a different argument.
- dashmflag=-xM
- depmode=dashmstdout
-fi
-
-cygpath_u="cygpath -u -f -"
-if test "$depmode" = msvcmsys; then
- # This is just like msvisualcpp but w/o cygpath translation.
- # Just convert the backslash-escaped backslashes to single forward
- # slashes to satisfy depend.m4
- cygpath_u="sed s,\\\\\\\\,/,g"
- depmode=msvisualcpp
-fi
-
-case "$depmode" in
-gcc3)
-## gcc 3 implements dependency tracking that does exactly what
-## we want. Yay! Note: for some reason libtool 1.4 doesn't like
-## it if -MD -MP comes after the -MF stuff. Hmm.
-## Unfortunately, FreeBSD c89 acceptance of flags depends upon
-## the command line argument order; so add the flags where they
-## appear in depend2.am. Note that the slowdown incurred here
-## affects only configure: in makefiles, %FASTDEP% shortcuts this.
- for arg
- do
- case $arg in
- -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
- *) set fnord "$@" "$arg" ;;
- esac
- shift # fnord
- shift # $arg
- done
- "$@"
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- mv "$tmpdepfile" "$depfile"
- ;;
-
-gcc)
-## There are various ways to get dependency output from gcc. Here's
-## why we pick this rather obscure method:
-## - Don't want to use -MD because we'd like the dependencies to end
-## up in a subdir. Having to rename by hand is ugly.
-## (We might end up doing this anyway to support other compilers.)
-## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
-## -MM, not -M (despite what the docs say).
-## - Using -M directly means running the compiler twice (even worse
-## than renaming).
- if test -z "$gccflag"; then
- gccflag=-MD,
- fi
- "$@" -Wp,"$gccflag$tmpdepfile"
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- rm -f "$depfile"
- echo "$object : \\" > "$depfile"
- alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
-## The second -e expression handles DOS-style file names with drive letters.
- sed -e 's/^[^:]*: / /' \
- -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
-## This next piece of magic avoids the `deleted header file' problem.
-## The problem is that when a header file which appears in a .P file
-## is deleted, the dependency causes make to die (because there is
-## typically no way to rebuild the header). We avoid this by adding
-## dummy dependencies for each header file. Too bad gcc doesn't do
-## this for us directly.
- tr ' ' '
-' < "$tmpdepfile" |
-## Some versions of gcc put a space before the `:'. On the theory
-## that the space means something, we add a space to the output as
-## well.
-## Some versions of the HPUX 10.20 sed can't process this invocation
-## correctly. Breaking it into two sed invocations is a workaround.
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-hp)
- # This case exists only to let depend.m4 do its work. It works by
- # looking at the text of this script. This case will never be run,
- # since it is checked for above.
- exit 1
- ;;
-
-sgi)
- if test "$libtool" = yes; then
- "$@" "-Wp,-MDupdate,$tmpdepfile"
- else
- "$@" -MDupdate "$tmpdepfile"
- fi
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- rm -f "$depfile"
-
- if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files
- echo "$object : \\" > "$depfile"
-
- # Clip off the initial element (the dependent). Don't try to be
- # clever and replace this with sed code, as IRIX sed won't handle
- # lines with more than a fixed number of characters (4096 in
- # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
- # the IRIX cc adds comments like `#:fec' to the end of the
- # dependency line.
- tr ' ' '
-' < "$tmpdepfile" \
- | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \
- tr '
-' ' ' >> "$depfile"
- echo >> "$depfile"
-
- # The second pass generates a dummy entry for each header file.
- tr ' ' '
-' < "$tmpdepfile" \
- | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
- >> "$depfile"
- else
- # The sourcefile does not contain any dependencies, so just
- # store a dummy comment line, to avoid errors with the Makefile
- # "include basename.Plo" scheme.
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile"
- ;;
-
-aix)
- # The C for AIX Compiler uses -M and outputs the dependencies
- # in a .u file. In older versions, this file always lives in the
- # current directory. Also, the AIX compiler puts `$object:' at the
- # start of each line; $object doesn't have directory information.
- # Version 6 uses the directory in both cases.
- dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
- test "x$dir" = "x$object" && dir=
- base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
- if test "$libtool" = yes; then
- tmpdepfile1=$dir$base.u
- tmpdepfile2=$base.u
- tmpdepfile3=$dir.libs/$base.u
- "$@" -Wc,-M
- else
- tmpdepfile1=$dir$base.u
- tmpdepfile2=$dir$base.u
- tmpdepfile3=$dir$base.u
- "$@" -M
- fi
- stat=$?
-
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
- exit $stat
- fi
-
- for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
- do
- test -f "$tmpdepfile" && break
- done
- if test -f "$tmpdepfile"; then
- # Each line is of the form `foo.o: dependent.h'.
- # Do two passes, one to just change these to
- # `$object: dependent.h' and one to simply `dependent.h:'.
- sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
- # That's a tab and a space in the [].
- sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
- else
- # The sourcefile does not contain any dependencies, so just
- # store a dummy comment line, to avoid errors with the Makefile
- # "include basename.Plo" scheme.
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile"
- ;;
-
-icc)
- # Intel's C compiler understands `-MD -MF file'. However on
- # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c
- # ICC 7.0 will fill foo.d with something like
- # foo.o: sub/foo.c
- # foo.o: sub/foo.h
- # which is wrong. We want:
- # sub/foo.o: sub/foo.c
- # sub/foo.o: sub/foo.h
- # sub/foo.c:
- # sub/foo.h:
- # ICC 7.1 will output
- # foo.o: sub/foo.c sub/foo.h
- # and will wrap long lines using \ :
- # foo.o: sub/foo.c ... \
- # sub/foo.h ... \
- # ...
-
- "$@" -MD -MF "$tmpdepfile"
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile"
- exit $stat
- fi
- rm -f "$depfile"
- # Each line is of the form `foo.o: dependent.h',
- # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
- # Do two passes, one to just change these to
- # `$object: dependent.h' and one to simply `dependent.h:'.
- sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
- # Some versions of the HPUX 10.20 sed can't process this invocation
- # correctly. Breaking it into two sed invocations is a workaround.
- sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" |
- sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-hp2)
- # The "hp" stanza above does not work with aCC (C++) and HP's ia64
- # compilers, which have integrated preprocessors. The correct option
- # to use with these is +Maked; it writes dependencies to a file named
- # 'foo.d', which lands next to the object file, wherever that
- # happens to be.
- # Much of this is similar to the tru64 case; see comments there.
- dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
- test "x$dir" = "x$object" && dir=
- base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
- if test "$libtool" = yes; then
- tmpdepfile1=$dir$base.d
- tmpdepfile2=$dir.libs/$base.d
- "$@" -Wc,+Maked
- else
- tmpdepfile1=$dir$base.d
- tmpdepfile2=$dir$base.d
- "$@" +Maked
- fi
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile1" "$tmpdepfile2"
- exit $stat
- fi
-
- for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
- do
- test -f "$tmpdepfile" && break
- done
- if test -f "$tmpdepfile"; then
- sed -e "s,^.*\.[a-z]*:,$object:," "$tmpdepfile" > "$depfile"
- # Add `dependent.h:' lines.
- sed -ne '2,${
- s/^ *//
- s/ \\*$//
- s/$/:/
- p
- }' "$tmpdepfile" >> "$depfile"
- else
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile" "$tmpdepfile2"
- ;;
-
-tru64)
- # The Tru64 compiler uses -MD to generate dependencies as a side
- # effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'.
- # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
- # dependencies in `foo.d' instead, so we check for that too.
- # Subdirectories are respected.
- dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
- test "x$dir" = "x$object" && dir=
- base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
-
- if test "$libtool" = yes; then
- # With Tru64 cc, shared objects can also be used to make a
- # static library. This mechanism is used in libtool 1.4 series to
- # handle both shared and static libraries in a single compilation.
- # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d.
- #
- # With libtool 1.5 this exception was removed, and libtool now
- # generates 2 separate objects for the 2 libraries. These two
- # compilations output dependencies in $dir.libs/$base.o.d and
- # in $dir$base.o.d. We have to check for both files, because
- # one of the two compilations can be disabled. We should prefer
- # $dir$base.o.d over $dir.libs/$base.o.d because the latter is
- # automatically cleaned when .libs/ is deleted, while ignoring
- # the former would cause a distcleancheck panic.
- tmpdepfile1=$dir.libs/$base.lo.d # libtool 1.4
- tmpdepfile2=$dir$base.o.d # libtool 1.5
- tmpdepfile3=$dir.libs/$base.o.d # libtool 1.5
- tmpdepfile4=$dir.libs/$base.d # Compaq CCC V6.2-504
- "$@" -Wc,-MD
- else
- tmpdepfile1=$dir$base.o.d
- tmpdepfile2=$dir$base.d
- tmpdepfile3=$dir$base.d
- tmpdepfile4=$dir$base.d
- "$@" -MD
- fi
-
- stat=$?
- if test $stat -eq 0; then :
- else
- rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
- exit $stat
- fi
-
- for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
- do
- test -f "$tmpdepfile" && break
- done
- if test -f "$tmpdepfile"; then
- sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
- # That's a tab and a space in the [].
- sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
- else
- echo "#dummy" > "$depfile"
- fi
- rm -f "$tmpdepfile"
- ;;
-
-#nosideeffect)
- # This comment above is used by automake to tell side-effect
- # dependency tracking mechanisms from slower ones.
-
-dashmstdout)
- # Important note: in order to support this mode, a compiler *must*
- # always write the preprocessed file to stdout, regardless of -o.
- "$@" || exit $?
-
- # Remove the call to Libtool.
- if test "$libtool" = yes; then
- while test "X$1" != 'X--mode=compile'; do
- shift
- done
- shift
- fi
-
- # Remove `-o $object'.
- IFS=" "
- for arg
- do
- case $arg in
- -o)
- shift
- ;;
- $object)
- shift
- ;;
- *)
- set fnord "$@" "$arg"
- shift # fnord
- shift # $arg
- ;;
- esac
- done
-
- test -z "$dashmflag" && dashmflag=-M
- # Require at least two characters before searching for `:'
- # in the target name. This is to cope with DOS-style filenames:
- # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise.
- "$@" $dashmflag |
- sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile"
- rm -f "$depfile"
- cat < "$tmpdepfile" > "$depfile"
- tr ' ' '
-' < "$tmpdepfile" | \
-## Some versions of the HPUX 10.20 sed can't process this invocation
-## correctly. Breaking it into two sed invocations is a workaround.
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-dashXmstdout)
- # This case only exists to satisfy depend.m4. It is never actually
- # run, as this mode is specially recognized in the preamble.
- exit 1
- ;;
-
-makedepend)
- "$@" || exit $?
- # Remove any Libtool call
- if test "$libtool" = yes; then
- while test "X$1" != 'X--mode=compile'; do
- shift
- done
- shift
- fi
- # X makedepend
- shift
- cleared=no eat=no
- for arg
- do
- case $cleared in
- no)
- set ""; shift
- cleared=yes ;;
- esac
- if test $eat = yes; then
- eat=no
- continue
- fi
- case "$arg" in
- -D*|-I*)
- set fnord "$@" "$arg"; shift ;;
- # Strip any option that makedepend may not understand. Remove
- # the object too, otherwise makedepend will parse it as a source file.
- -arch)
- eat=yes ;;
- -*|$object)
- ;;
- *)
- set fnord "$@" "$arg"; shift ;;
- esac
- done
- obj_suffix=`echo "$object" | sed 's/^.*\././'`
- touch "$tmpdepfile"
- ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
- rm -f "$depfile"
- cat < "$tmpdepfile" > "$depfile"
- sed '1,2d' "$tmpdepfile" | tr ' ' '
-' | \
-## Some versions of the HPUX 10.20 sed can't process this invocation
-## correctly. Breaking it into two sed invocations is a workaround.
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile" "$tmpdepfile".bak
- ;;
-
-cpp)
- # Important note: in order to support this mode, a compiler *must*
- # always write the preprocessed file to stdout.
- "$@" || exit $?
-
- # Remove the call to Libtool.
- if test "$libtool" = yes; then
- while test "X$1" != 'X--mode=compile'; do
- shift
- done
- shift
- fi
-
- # Remove `-o $object'.
- IFS=" "
- for arg
- do
- case $arg in
- -o)
- shift
- ;;
- $object)
- shift
- ;;
- *)
- set fnord "$@" "$arg"
- shift # fnord
- shift # $arg
- ;;
- esac
- done
-
- "$@" -E |
- sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
- -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' |
- sed '$ s: \\$::' > "$tmpdepfile"
- rm -f "$depfile"
- echo "$object : \\" > "$depfile"
- cat < "$tmpdepfile" >> "$depfile"
- sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-msvisualcpp)
- # Important note: in order to support this mode, a compiler *must*
- # always write the preprocessed file to stdout.
- "$@" || exit $?
-
- # Remove the call to Libtool.
- if test "$libtool" = yes; then
- while test "X$1" != 'X--mode=compile'; do
- shift
- done
- shift
- fi
-
- IFS=" "
- for arg
- do
- case "$arg" in
- -o)
- shift
- ;;
- $object)
- shift
- ;;
- "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
- set fnord "$@"
- shift
- shift
- ;;
- *)
- set fnord "$@" "$arg"
- shift
- shift
- ;;
- esac
- done
- "$@" -E 2>/dev/null |
- sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
- rm -f "$depfile"
- echo "$object : \\" > "$depfile"
- sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile"
- echo " " >> "$depfile"
- sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
- rm -f "$tmpdepfile"
- ;;
-
-msvcmsys)
- # This case exists only to let depend.m4 do its work. It works by
- # looking at the text of this script. This case will never be run,
- # since it is checked for above.
- exit 1
- ;;
-
-none)
- exec "$@"
- ;;
-
-*)
- echo "Unknown depmode $depmode" 1>&2
- exit 1
- ;;
-esac
-
-exit 0
-
-# Local Variables:
-# mode: shell-script
-# sh-indentation: 2
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "scriptversion="
-# time-stamp-format: "%:y-%02m-%02d.%02H"
-# time-stamp-time-zone: "UTC"
-# time-stamp-end: "; # UTC"
-# End:
diff --git a/scripts/training/compact-rule-table/install-sh b/scripts/training/compact-rule-table/install-sh
deleted file mode 100755
index 6781b987b..000000000
--- a/scripts/training/compact-rule-table/install-sh
+++ /dev/null
@@ -1,520 +0,0 @@
-#!/bin/sh
-# install - install a program, script, or datafile
-
-scriptversion=2009-04-28.21; # UTC
-
-# This originates from X11R5 (mit/util/scripts/install.sh), which was
-# later released in X11R6 (xc/config/util/install.sh) with the
-# following copyright and license.
-#
-# Copyright (C) 1994 X Consortium
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to
-# deal in the Software without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-# sell copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
-# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
-# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-# Except as contained in this notice, the name of the X Consortium shall not
-# be used in advertising or otherwise to promote the sale, use or other deal-
-# ings in this Software without prior written authorization from the X Consor-
-# tium.
-#
-#
-# FSF changes to this file are in the public domain.
-#
-# Calling this script install-sh is preferred over install.sh, to prevent
-# `make' implicit rules from creating a file called install from it
-# when there is no Makefile.
-#
-# This script is compatible with the BSD install script, but was written
-# from scratch.
-
-nl='
-'
-IFS=" "" $nl"
-
-# set DOITPROG to echo to test this script
-
-# Don't use :- since 4.3BSD and earlier shells don't like it.
-doit=${DOITPROG-}
-if test -z "$doit"; then
- doit_exec=exec
-else
- doit_exec=$doit
-fi
-
-# Put in absolute file names if you don't have them in your path;
-# or use environment vars.
-
-chgrpprog=${CHGRPPROG-chgrp}
-chmodprog=${CHMODPROG-chmod}
-chownprog=${CHOWNPROG-chown}
-cmpprog=${CMPPROG-cmp}
-cpprog=${CPPROG-cp}
-mkdirprog=${MKDIRPROG-mkdir}
-mvprog=${MVPROG-mv}
-rmprog=${RMPROG-rm}
-stripprog=${STRIPPROG-strip}
-
-posix_glob='?'
-initialize_posix_glob='
- test "$posix_glob" != "?" || {
- if (set -f) 2>/dev/null; then
- posix_glob=
- else
- posix_glob=:
- fi
- }
-'
-
-posix_mkdir=
-
-# Desired mode of installed file.
-mode=0755
-
-chgrpcmd=
-chmodcmd=$chmodprog
-chowncmd=
-mvcmd=$mvprog
-rmcmd="$rmprog -f"
-stripcmd=
-
-src=
-dst=
-dir_arg=
-dst_arg=
-
-copy_on_change=false
-no_target_directory=
-
-usage="\
-Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
- or: $0 [OPTION]... SRCFILES... DIRECTORY
- or: $0 [OPTION]... -t DIRECTORY SRCFILES...
- or: $0 [OPTION]... -d DIRECTORIES...
-
-In the 1st form, copy SRCFILE to DSTFILE.
-In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
-In the 4th, create DIRECTORIES.
-
-Options:
- --help display this help and exit.
- --version display version info and exit.
-
- -c (ignored)
- -C install only if different (preserve the last data modification time)
- -d create directories instead of installing files.
- -g GROUP $chgrpprog installed files to GROUP.
- -m MODE $chmodprog installed files to MODE.
- -o USER $chownprog installed files to USER.
- -s $stripprog installed files.
- -t DIRECTORY install into DIRECTORY.
- -T report an error if DSTFILE is a directory.
-
-Environment variables override the default commands:
- CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
- RMPROG STRIPPROG
-"
-
-while test $# -ne 0; do
- case $1 in
- -c) ;;
-
- -C) copy_on_change=true;;
-
- -d) dir_arg=true;;
-
- -g) chgrpcmd="$chgrpprog $2"
- shift;;
-
- --help) echo "$usage"; exit $?;;
-
- -m) mode=$2
- case $mode in
- *' '* | *' '* | *'
-'* | *'*'* | *'?'* | *'['*)
- echo "$0: invalid mode: $mode" >&2
- exit 1;;
- esac
- shift;;
-
- -o) chowncmd="$chownprog $2"
- shift;;
-
- -s) stripcmd=$stripprog;;
-
- -t) dst_arg=$2
- shift;;
-
- -T) no_target_directory=true;;
-
- --version) echo "$0 $scriptversion"; exit $?;;
-
- --) shift
- break;;
-
- -*) echo "$0: invalid option: $1" >&2
- exit 1;;
-
- *) break;;
- esac
- shift
-done
-
-if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
- # When -d is used, all remaining arguments are directories to create.
- # When -t is used, the destination is already specified.
- # Otherwise, the last argument is the destination. Remove it from $@.
- for arg
- do
- if test -n "$dst_arg"; then
- # $@ is not empty: it contains at least $arg.
- set fnord "$@" "$dst_arg"
- shift # fnord
- fi
- shift # arg
- dst_arg=$arg
- done
-fi
-
-if test $# -eq 0; then
- if test -z "$dir_arg"; then
- echo "$0: no input file specified." >&2
- exit 1
- fi
- # It's OK to call `install-sh -d' without argument.
- # This can happen when creating conditional directories.
- exit 0
-fi
-
-if test -z "$dir_arg"; then
- trap '(exit $?); exit' 1 2 13 15
-
- # Set umask so as not to create temps with too-generous modes.
- # However, 'strip' requires both read and write access to temps.
- case $mode in
- # Optimize common cases.
- *644) cp_umask=133;;
- *755) cp_umask=22;;
-
- *[0-7])
- if test -z "$stripcmd"; then
- u_plus_rw=
- else
- u_plus_rw='% 200'
- fi
- cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
- *)
- if test -z "$stripcmd"; then
- u_plus_rw=
- else
- u_plus_rw=,u+rw
- fi
- cp_umask=$mode$u_plus_rw;;
- esac
-fi
-
-for src
-do
- # Protect names starting with `-'.
- case $src in
- -*) src=./$src;;
- esac
-
- if test -n "$dir_arg"; then
- dst=$src
- dstdir=$dst
- test -d "$dstdir"
- dstdir_status=$?
- else
-
- # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
- # might cause directories to be created, which would be especially bad
- # if $src (and thus $dsttmp) contains '*'.
- if test ! -f "$src" && test ! -d "$src"; then
- echo "$0: $src does not exist." >&2
- exit 1
- fi
-
- if test -z "$dst_arg"; then
- echo "$0: no destination specified." >&2
- exit 1
- fi
-
- dst=$dst_arg
- # Protect names starting with `-'.
- case $dst in
- -*) dst=./$dst;;
- esac
-
- # If destination is a directory, append the input filename; won't work
- # if double slashes aren't ignored.
- if test -d "$dst"; then
- if test -n "$no_target_directory"; then
- echo "$0: $dst_arg: Is a directory" >&2
- exit 1
- fi
- dstdir=$dst
- dst=$dstdir/`basename "$src"`
- dstdir_status=0
- else
- # Prefer dirname, but fall back on a substitute if dirname fails.
- dstdir=`
- (dirname "$dst") 2>/dev/null ||
- expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
- X"$dst" : 'X\(//\)[^/]' \| \
- X"$dst" : 'X\(//\)$' \| \
- X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
- echo X"$dst" |
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
- s//\1/
- q
- }
- /^X\(\/\/\)[^/].*/{
- s//\1/
- q
- }
- /^X\(\/\/\)$/{
- s//\1/
- q
- }
- /^X\(\/\).*/{
- s//\1/
- q
- }
- s/.*/./; q'
- `
-
- test -d "$dstdir"
- dstdir_status=$?
- fi
- fi
-
- obsolete_mkdir_used=false
-
- if test $dstdir_status != 0; then
- case $posix_mkdir in
- '')
- # Create intermediate dirs using mode 755 as modified by the umask.
- # This is like FreeBSD 'install' as of 1997-10-28.
- umask=`umask`
- case $stripcmd.$umask in
- # Optimize common cases.
- *[2367][2367]) mkdir_umask=$umask;;
- .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
-
- *[0-7])
- mkdir_umask=`expr $umask + 22 \
- - $umask % 100 % 40 + $umask % 20 \
- - $umask % 10 % 4 + $umask % 2
- `;;
- *) mkdir_umask=$umask,go-w;;
- esac
-
- # With -d, create the new directory with the user-specified mode.
- # Otherwise, rely on $mkdir_umask.
- if test -n "$dir_arg"; then
- mkdir_mode=-m$mode
- else
- mkdir_mode=
- fi
-
- posix_mkdir=false
- case $umask in
- *[123567][0-7][0-7])
- # POSIX mkdir -p sets u+wx bits regardless of umask, which
- # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
- ;;
- *)
- tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
- trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
-
- if (umask $mkdir_umask &&
- exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
- then
- if test -z "$dir_arg" || {
- # Check for POSIX incompatibilities with -m.
- # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
- # other-writeable bit of parent directory when it shouldn't.
- # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
- ls_ld_tmpdir=`ls -ld "$tmpdir"`
- case $ls_ld_tmpdir in
- d????-?r-*) different_mode=700;;
- d????-?--*) different_mode=755;;
- *) false;;
- esac &&
- $mkdirprog -m$different_mode -p -- "$tmpdir" && {
- ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
- test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
- }
- }
- then posix_mkdir=:
- fi
- rmdir "$tmpdir/d" "$tmpdir"
- else
- # Remove any dirs left behind by ancient mkdir implementations.
- rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
- fi
- trap '' 0;;
- esac;;
- esac
-
- if
- $posix_mkdir && (
- umask $mkdir_umask &&
- $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
- )
- then :
- else
-
- # The umask is ridiculous, or mkdir does not conform to POSIX,
- # or it failed possibly due to a race condition. Create the
- # directory the slow way, step by step, checking for races as we go.
-
- case $dstdir in
- /*) prefix='/';;
- -*) prefix='./';;
- *) prefix='';;
- esac
-
- eval "$initialize_posix_glob"
-
- oIFS=$IFS
- IFS=/
- $posix_glob set -f
- set fnord $dstdir
- shift
- $posix_glob set +f
- IFS=$oIFS
-
- prefixes=
-
- for d
- do
- test -z "$d" && continue
-
- prefix=$prefix$d
- if test -d "$prefix"; then
- prefixes=
- else
- if $posix_mkdir; then
-            (umask $mkdir_umask &&
- $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
- # Don't fail if two instances are running concurrently.
- test -d "$prefix" || exit 1
- else
- case $prefix in
- *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
- *) qprefix=$prefix;;
- esac
- prefixes="$prefixes '$qprefix'"
- fi
- fi
- prefix=$prefix/
- done
-
- if test -n "$prefixes"; then
- # Don't fail if two instances are running concurrently.
- (umask $mkdir_umask &&
- eval "\$doit_exec \$mkdirprog $prefixes") ||
- test -d "$dstdir" || exit 1
- obsolete_mkdir_used=true
- fi
- fi
- fi
-
- if test -n "$dir_arg"; then
- { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
- { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
- { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
- test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
- else
-
- # Make a couple of temp file names in the proper directory.
- dsttmp=$dstdir/_inst.$$_
- rmtmp=$dstdir/_rm.$$_
-
- # Trap to clean up those temp files at exit.
- trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
-
- # Copy the file name to the temp name.
- (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
-
- # and set any options; do chmod last to preserve setuid bits.
- #
- # If any of these fail, we abort the whole thing. If we want to
- # ignore errors from any of these, just make sure not to ignore
- # errors from the above "$doit $cpprog $src $dsttmp" command.
- #
- { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
- { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
- { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
- { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
-
- # If -C, don't bother to copy if it wouldn't change the file.
- if $copy_on_change &&
- old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
- new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
-
- eval "$initialize_posix_glob" &&
- $posix_glob set -f &&
- set X $old && old=:$2:$4:$5:$6 &&
- set X $new && new=:$2:$4:$5:$6 &&
- $posix_glob set +f &&
-
- test "$old" = "$new" &&
- $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
- then
- rm -f "$dsttmp"
- else
- # Rename the file to the real destination.
- $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
-
- # The rename failed, perhaps because mv can't rename something else
- # to itself, or perhaps because mv is so ancient that it does not
- # support -f.
- {
- # Now remove or move aside any old file at destination location.
- # We try this two ways since rm can't unlink itself on some
- # systems and the destination file might be busy for other
- # reasons. In this case, the final cleanup might fail but the new
- # file should still install successfully.
- {
- test ! -f "$dst" ||
- $doit $rmcmd -f "$dst" 2>/dev/null ||
- { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
- { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
- } ||
- { echo "$0: cannot unlink or rename $dst" >&2
- (exit 1); exit 1
- }
- } &&
-
- # Now rename the file to the real destination.
- $doit $mvcmd "$dsttmp" "$dst"
- }
- fi || exit 1
-
- trap '' 0
- fi
-done
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "scriptversion="
-# time-stamp-format: "%:y-%02m-%02d.%02H"
-# time-stamp-time-zone: "UTC"
-# time-stamp-end: "; # UTC"
-# End:
diff --git a/scripts/training/compact-rule-table/ltmain.sh b/scripts/training/compact-rule-table/ltmain.sh
deleted file mode 100755
index a72f2fd78..000000000
--- a/scripts/training/compact-rule-table/ltmain.sh
+++ /dev/null
@@ -1,8406 +0,0 @@
-# Generated from ltmain.m4sh.
-
-# ltmain.sh (GNU libtool) 2.2.6b
-# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
-
-# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
-# This is free software; see the source for copying conditions. There is NO
-# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
-# GNU Libtool is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# As a special exception to the GNU General Public License,
-# if you distribute this file as part of a program or library that
-# is built using GNU Libtool, you may include this file under the
-# same distribution terms that you use for the rest of that program.
-#
-# GNU Libtool is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Libtool; see the file COPYING. If not, a copy
-# can be downloaded from http://www.gnu.org/licenses/gpl.html,
-# or obtained by writing to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
-# Usage: $progname [OPTION]... [MODE-ARG]...
-#
-# Provide generalized library-building support services.
-#
-# --config show all configuration variables
-# --debug enable verbose shell tracing
-# -n, --dry-run display commands without modifying any files
-# --features display basic configuration information and exit
-# --mode=MODE use operation mode MODE
-# --preserve-dup-deps don't remove duplicate dependency libraries
-# --quiet, --silent don't print informational messages
-# --tag=TAG use configuration variables from tag TAG
-# -v, --verbose print informational messages (default)
-# --version print version information
-# -h, --help print short or long help message
-#
-# MODE must be one of the following:
-#
-# clean remove files from the build directory
-# compile compile a source file into a libtool object
-# execute automatically set library path, then run a program
-# finish complete the installation of libtool libraries
-# install install libraries or executables
-# link create a library or an executable
-# uninstall remove libraries from an installed directory
-#
-# MODE-ARGS vary depending on the MODE.
-# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
-#
-# When reporting a bug, please describe a test case to reproduce it and
-# include the following information:
-#
-# host-triplet: $host
-# shell: $SHELL
-# compiler: $LTCC
-# compiler flags: $LTCFLAGS
-# linker: $LD (gnu? $with_gnu_ld)
-# $progname: (GNU libtool) 2.2.6b
-# automake: $automake_version
-# autoconf: $autoconf_version
-#
-# Report bugs to <bug-libtool@gnu.org>.
-
-PROGRAM=ltmain.sh
-PACKAGE=libtool
-VERSION=2.2.6b
-TIMESTAMP=""
-package_revision=1.3017
-
-# Be Bourne compatible
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
- emulate sh
- NULLCMD=:
- # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which
- # is contrary to our usage. Disable this feature.
- alias -g '${1+"$@"}'='"$@"'
- setopt NO_GLOB_SUBST
-else
- case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
-fi
-BIN_SH=xpg4; export BIN_SH # for Tru64
-DUALCASE=1; export DUALCASE # for MKS sh
-
-# NLS nuisances: We save the old values to restore during execute mode.
-# Only set LANG and LC_ALL to C if already set.
-# These must not be set unconditionally because not all systems understand
-# e.g. LANG=C (notably SCO).
-lt_user_locale=
-lt_safe_locale=
-for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
-do
- eval "if test \"\${$lt_var+set}\" = set; then
- save_$lt_var=\$$lt_var
- $lt_var=C
- export $lt_var
- lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
- lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
- fi"
-done
-
-$lt_unset CDPATH
-
-
-
-
-
-: ${CP="cp -f"}
-: ${ECHO="echo"}
-: ${EGREP="/bin/grep -E"}
-: ${FGREP="/bin/grep -F"}
-: ${GREP="/bin/grep"}
-: ${LN_S="ln -s"}
-: ${MAKE="make"}
-: ${MKDIR="mkdir"}
-: ${MV="mv -f"}
-: ${RM="rm -f"}
-: ${SED="/bin/sed"}
-: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
-: ${Xsed="$SED -e 1s/^X//"}
-
-# Global variables:
-EXIT_SUCCESS=0
-EXIT_FAILURE=1
-EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing.
-EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake.
-
-exit_status=$EXIT_SUCCESS
-
-# Make sure IFS has a sensible default
-lt_nl='
-'
-IFS=" $lt_nl"
-
-dirname="s,/[^/]*$,,"
-basename="s,^.*/,,"
-
-# func_dirname_and_basename file append nondir_replacement
-# perform func_basename and func_dirname in a single function
-# call:
-# dirname: Compute the dirname of FILE. If nonempty,
-# add APPEND to the result, otherwise set result
-# to NONDIR_REPLACEMENT.
-# value returned in "$func_dirname_result"
-# basename: Compute filename of FILE.
-#           value returned in "$func_basename_result"
-# Implementation must be kept synchronized with func_dirname
-# and func_basename. For efficiency, we do not delegate to
-# those functions but instead duplicate the functionality here.
-func_dirname_and_basename ()
-{
- # Extract subdirectory from the argument.
- func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"`
- if test "X$func_dirname_result" = "X${1}"; then
- func_dirname_result="${3}"
- else
- func_dirname_result="$func_dirname_result${2}"
- fi
- func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"`
-}
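-# Illustrative call (a sketch; the path is hypothetical).  With APPEND="/"
-# and NONDIR_REPLACEMENT ".", the sed expressions above yield:
-#   func_dirname_and_basename "/usr/lib/libfoo.la" "/" "."
-#   # func_dirname_result  -> /usr/lib/
-#   # func_basename_result -> libfoo.la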
-
-# Generated shell functions inserted here.
-
-# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
-# is ksh but when the shell is invoked as "sh" and the current value of
-# the _XPG environment variable is not equal to 1 (one), the special
-# positional parameter $0, within a function call, is the name of the
-# function.
-progpath="$0"
-
-# The name of this program:
-# In the unlikely event $progname began with a '-', it would play havoc with
-# func_echo (imagine progname=-n), so we prepend ./ in that case:
-func_dirname_and_basename "$progpath"
-progname=$func_basename_result
-case $progname in
- -*) progname=./$progname ;;
-esac
-
-# Make sure we have an absolute path for reexecution:
-case $progpath in
- [\\/]*|[A-Za-z]:\\*) ;;
- *[\\/]*)
- progdir=$func_dirname_result
- progdir=`cd "$progdir" && pwd`
- progpath="$progdir/$progname"
- ;;
- *)
- save_IFS="$IFS"
- IFS=:
- for progdir in $PATH; do
- IFS="$save_IFS"
- test -x "$progdir/$progname" && break
- done
- IFS="$save_IFS"
- test -n "$progdir" || progdir=`pwd`
- progpath="$progdir/$progname"
- ;;
-esac
-
-# Sed substitution that helps us do robust quoting. It backslashifies
-# metacharacters that are still active within double-quoted strings.
-Xsed="${SED}"' -e 1s/^X//'
-sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
-
-# Same as above, but do not quote variable references.
-double_quote_subst='s/\(["`\\]\)/\\\1/g'
-
-# Re-`\' parameter expansions in output of double_quote_subst that were
-# `\'-ed in input to the same. If an odd number of `\' preceded a '$'
-# in input to double_quote_subst, that '$' was protected from expansion.
-# Since each input `\' is now two `\'s, look for any number of runs of
-# four `\'s followed by two `\'s and then a '$'; put a `\' before that '$'.
-bs='\\'
-bs2='\\\\'
-bs4='\\\\\\\\'
-dollar='\$'
-sed_double_backslash="\
- s/$bs4/&\\
-/g
- s/^$bs2$dollar/$bs&/
- s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
- s/\n//g"
-
-# Standard options:
-opt_dry_run=false
-opt_help=false
-opt_quiet=false
-opt_verbose=false
-opt_warning=:
-
-# func_echo arg...
-# Echo program name prefixed message, along with the current mode
-# name if it has been set yet.
-func_echo ()
-{
- $ECHO "$progname${mode+: }$mode: $*"
-}
-
-# func_verbose arg...
-# Echo program name prefixed message in verbose mode only.
-func_verbose ()
-{
- $opt_verbose && func_echo ${1+"$@"}
-
- # A bug in bash halts the script if the last line of a function
- # fails when set -e is in force, so we need another command to
- # work around that:
- :
-}
-
-# func_error arg...
-# Echo program name prefixed message to standard error.
-func_error ()
-{
- $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2
-}
-
-# func_warning arg...
-# Echo program name prefixed warning message to standard error.
-func_warning ()
-{
- $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2
-
- # bash bug again:
- :
-}
-
-# func_fatal_error arg...
-# Echo program name prefixed message to standard error, and exit.
-func_fatal_error ()
-{
- func_error ${1+"$@"}
- exit $EXIT_FAILURE
-}
-
-# func_fatal_help arg...
-# Echo program name prefixed message to standard error, followed by
-# a help hint, and exit.
-func_fatal_help ()
-{
- func_error ${1+"$@"}
- func_fatal_error "$help"
-}
-help="Try \`$progname --help' for more information." ## default
-
-
-# func_grep expression filename
-# Check whether EXPRESSION matches any line of FILENAME, without output.
-func_grep ()
-{
- $GREP "$1" "$2" >/dev/null 2>&1
-}
-
-
-# func_mkdir_p directory-path
-# Make sure the entire path to DIRECTORY-PATH is available.
-func_mkdir_p ()
-{
- my_directory_path="$1"
- my_dir_list=
-
- if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
-
- # Protect directory names starting with `-'
- case $my_directory_path in
- -*) my_directory_path="./$my_directory_path" ;;
- esac
-
- # While some portion of DIR does not yet exist...
- while test ! -d "$my_directory_path"; do
- # ...make a list in topmost first order. Use a colon delimited
-    # list in case some portion of the path contains whitespace.
- my_dir_list="$my_directory_path:$my_dir_list"
-
- # If the last portion added has no slash in it, the list is done
- case $my_directory_path in */*) ;; *) break ;; esac
-
- # ...otherwise throw away the child directory and loop
- my_directory_path=`$ECHO "X$my_directory_path" | $Xsed -e "$dirname"`
- done
- my_dir_list=`$ECHO "X$my_dir_list" | $Xsed -e 's,:*$,,'`
-
- save_mkdir_p_IFS="$IFS"; IFS=':'
- for my_dir in $my_dir_list; do
- IFS="$save_mkdir_p_IFS"
-      # mkdir can fail with a `File exists' error if two processes
- # try to create one of the directories concurrently. Don't
- # stop in that case!
- $MKDIR "$my_dir" 2>/dev/null || :
- done
- IFS="$save_mkdir_p_IFS"
-
- # Bail out if we (or some other process) failed to create a directory.
- test -d "$my_directory_path" || \
- func_fatal_error "Failed to create \`$1'"
- fi
-}
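-# Illustrative call (hypothetical path): creates every missing component of
-# the path, tolerating another libtool process creating the same directories:
-#   func_mkdir_p "build/.libs/sub"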
-
-
-# func_mktempdir [string]
-# Make a temporary directory that won't clash with other running
-# libtool processes, and avoids race conditions if possible. If
-# given, STRING is the basename for that directory.
-func_mktempdir ()
-{
- my_template="${TMPDIR-/tmp}/${1-$progname}"
-
- if test "$opt_dry_run" = ":"; then
- # Return a directory name, but don't create it in dry-run mode
- my_tmpdir="${my_template}-$$"
- else
-
- # If mktemp works, use that first and foremost
- my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
-
- if test ! -d "$my_tmpdir"; then
- # Failing that, at least try and use $RANDOM to avoid a race
- my_tmpdir="${my_template}-${RANDOM-0}$$"
-
- save_mktempdir_umask=`umask`
- umask 0077
- $MKDIR "$my_tmpdir"
- umask $save_mktempdir_umask
- fi
-
- # If we're not in dry-run mode, bomb out on failure
- test -d "$my_tmpdir" || \
- func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
- fi
-
- $ECHO "X$my_tmpdir" | $Xsed
-}
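-# Illustrative call (a sketch; the basename is hypothetical): the directory
-# name is printed on stdout, so capture it with command substitution:
-#   my_workdir=`func_mktempdir "${progname}-extract"`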
-
-
-# func_quote_for_eval arg
-# Aesthetically quote ARG to be evaled later.
-# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
-# is double-quoted, suitable for a subsequent eval, whereas
-# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
-# which are still active within double quotes backslashified.
-func_quote_for_eval ()
-{
- case $1 in
- *[\\\`\"\$]*)
- func_quote_for_eval_unquoted_result=`$ECHO "X$1" | $Xsed -e "$sed_quote_subst"` ;;
- *)
- func_quote_for_eval_unquoted_result="$1" ;;
- esac
-
- case $func_quote_for_eval_unquoted_result in
- # Double-quote args containing shell metacharacters to delay
-  # word splitting, command substitution, and variable
- # expansion for a subsequent eval.
- # Many Bourne shells cannot handle close brackets correctly
- # in scan sets, so we specify it separately.
- *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
- func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
- ;;
- *)
- func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
- esac
-}
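-# Illustrative call (a sketch): quoting an argument containing spaces and a
-# '$' so that a later eval still sees a single, unexpanded word:
-#   func_quote_for_eval 'name with $dollar'
-#   # func_quote_for_eval_unquoted_result -> name with \$dollar
-#   # func_quote_for_eval_result          -> "name with \$dollar"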
-
-
-# func_quote_for_expand arg
-# Aesthetically quote ARG to be evaled later; same as above,
-# but do not quote variable references.
-func_quote_for_expand ()
-{
- case $1 in
- *[\\\`\"]*)
- my_arg=`$ECHO "X$1" | $Xsed \
- -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
- *)
- my_arg="$1" ;;
- esac
-
- case $my_arg in
- # Double-quote args containing shell metacharacters to delay
- # word splitting and command substitution for a subsequent eval.
- # Many Bourne shells cannot handle close brackets correctly
- # in scan sets, so we specify it separately.
- *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
- my_arg="\"$my_arg\""
- ;;
- esac
-
- func_quote_for_expand_result="$my_arg"
-}
-
-
-# func_show_eval cmd [fail_exp]
-# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
-# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
-# is given, then evaluate it.
-func_show_eval ()
-{
- my_cmd="$1"
- my_fail_exp="${2-:}"
-
- ${opt_silent-false} || {
- func_quote_for_expand "$my_cmd"
- eval "func_echo $func_quote_for_expand_result"
- }
-
- if ${opt_dry_run-false}; then :; else
- eval "$my_cmd"
- my_status=$?
- if test "$my_status" -eq 0; then :; else
- eval "(exit $my_status); $my_fail_exp"
- fi
- fi
-}
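-# Illustrative call (a sketch; the variable name is hypothetical): echo the
-# command unless --quiet was given, run it unless --dry-run, and evaluate the
-# fail expression on error:
-#   func_show_eval '$RM -r "$my_tmpdir"' 'exit $EXIT_FAILURE'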
-
-
-# func_show_eval_locale cmd [fail_exp]
-# Unless opt_silent is true, output CMD.  Then, if opt_dry_run is
-# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
-# is given, then evaluate it. Use the saved locale for evaluation.
-func_show_eval_locale ()
-{
- my_cmd="$1"
- my_fail_exp="${2-:}"
-
- ${opt_silent-false} || {
- func_quote_for_expand "$my_cmd"
- eval "func_echo $func_quote_for_expand_result"
- }
-
- if ${opt_dry_run-false}; then :; else
- eval "$lt_user_locale
- $my_cmd"
- my_status=$?
- eval "$lt_safe_locale"
- if test "$my_status" -eq 0; then :; else
- eval "(exit $my_status); $my_fail_exp"
- fi
- fi
-}
-
-
-
-
-
-# func_version
-# Echo version message to standard output and exit.
-func_version ()
-{
- $SED -n '/^# '$PROGRAM' (GNU /,/# warranty; / {
- s/^# //
- s/^# *$//
- s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
- p
- }' < "$progpath"
- exit $?
-}
-
-# func_usage
-# Echo short help message to standard output and exit.
-func_usage ()
-{
- $SED -n '/^# Usage:/,/# -h/ {
- s/^# //
- s/^# *$//
- s/\$progname/'$progname'/
- p
- }' < "$progpath"
- $ECHO
- $ECHO "run \`$progname --help | more' for full usage"
- exit $?
-}
-
-# func_help
-# Echo long help message to standard output and exit.
-func_help ()
-{
- $SED -n '/^# Usage:/,/# Report bugs to/ {
- s/^# //
- s/^# *$//
- s*\$progname*'$progname'*
- s*\$host*'"$host"'*
- s*\$SHELL*'"$SHELL"'*
- s*\$LTCC*'"$LTCC"'*
- s*\$LTCFLAGS*'"$LTCFLAGS"'*
- s*\$LD*'"$LD"'*
- s/\$with_gnu_ld/'"$with_gnu_ld"'/
- s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/
- s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/
- p
- }' < "$progpath"
- exit $?
-}
-
-# func_missing_arg argname
-# Echo program name prefixed message to standard error and set global
-# exit_cmd.
-func_missing_arg ()
-{
- func_error "missing argument for $1"
- exit_cmd=exit
-}
-
-exit_cmd=:
-
-
-
-
-
-# Check that we have a working $ECHO.
-if test "X$1" = X--no-reexec; then
- # Discard the --no-reexec flag, and continue.
- shift
-elif test "X$1" = X--fallback-echo; then
-  # Avoid an inline document here; it may be left over
- :
-elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t'; then
- # Yippee, $ECHO works!
- :
-else
- # Restart under the correct shell, and then maybe $ECHO will work.
- exec $SHELL "$progpath" --no-reexec ${1+"$@"}
-fi
-
-if test "X$1" = X--fallback-echo; then
- # used as fallback echo
- shift
- cat <<EOF
-$*
-EOF
- exit $EXIT_SUCCESS
-fi
-
-magic="%%%MAGIC variable%%%"
-magic_exe="%%%MAGIC EXE variable%%%"
-
-# Global variables.
-# $mode is unset
-nonopt=
-execute_dlfiles=
-preserve_args=
-lo2o="s/\\.lo\$/.${objext}/"
-o2lo="s/\\.${objext}\$/.lo/"
-extracted_archives=
-extracted_serial=0
-
-opt_dry_run=false
-opt_duplicate_deps=false
-opt_silent=false
-opt_debug=:
-
-# If this variable is set in any of the actions, the command in it
-# will be execed at the end. This prevents here-documents from being
-# left over by shells.
-exec_cmd=
-
-# func_fatal_configuration arg...
-# Echo program name prefixed message to standard error, followed by
-# a configuration failure hint, and exit.
-func_fatal_configuration ()
-{
- func_error ${1+"$@"}
- func_error "See the $PACKAGE documentation for more information."
- func_fatal_error "Fatal configuration error."
-}
-
-
-# func_config
-# Display the configuration for all the tags in this script.
-func_config ()
-{
- re_begincf='^# ### BEGIN LIBTOOL'
- re_endcf='^# ### END LIBTOOL'
-
- # Default configuration.
- $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
-
- # Now print the configurations for the tags.
- for tagname in $taglist; do
- $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
- done
-
- exit $?
-}
-
-# func_features
-# Display the features supported by this script.
-func_features ()
-{
- $ECHO "host: $host"
- if test "$build_libtool_libs" = yes; then
- $ECHO "enable shared libraries"
- else
- $ECHO "disable shared libraries"
- fi
- if test "$build_old_libs" = yes; then
- $ECHO "enable static libraries"
- else
- $ECHO "disable static libraries"
- fi
-
- exit $?
-}
-
-# func_enable_tag tagname
-# Verify that TAGNAME is valid, and either flag an error and exit, or
-# enable the TAGNAME tag. We also add TAGNAME to the global $taglist
-# variable here.
-func_enable_tag ()
-{
- # Global variable:
- tagname="$1"
-
- re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
- re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
- sed_extractcf="/$re_begincf/,/$re_endcf/p"
-
- # Validate tagname.
- case $tagname in
- *[!-_A-Za-z0-9,/]*)
- func_fatal_error "invalid tag name: $tagname"
- ;;
- esac
-
- # Don't test for the "default" C tag, as we know it's
- # there but not specially marked.
- case $tagname in
- CC) ;;
- *)
- if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
- taglist="$taglist $tagname"
-
- # Evaluate the configuration. Be careful to quote the path
- # and the sed script, to avoid splitting on whitespace, but
- # also don't use non-portable quotes within backquotes within
-      # quotes; we have to do it in 2 steps:
- extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
- eval "$extractedcf"
- else
- func_error "ignoring unknown tag $tagname"
- fi
- ;;
- esac
-}
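-# Illustrative call (a sketch): enable the tagged configuration named CXX,
-# assuming a "### BEGIN LIBTOOL TAG CONFIG: CXX" block exists in this script:
-#   func_enable_tag "CXX"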
-
-# Parse options once, thoroughly. This comes as soon as possible in
-# the script to make things like `libtool --version' happen quickly.
-{
-
- # Shorthand for --mode=foo, only valid as the first argument
- case $1 in
- clean|clea|cle|cl)
- shift; set dummy --mode clean ${1+"$@"}; shift
- ;;
- compile|compil|compi|comp|com|co|c)
- shift; set dummy --mode compile ${1+"$@"}; shift
- ;;
- execute|execut|execu|exec|exe|ex|e)
- shift; set dummy --mode execute ${1+"$@"}; shift
- ;;
- finish|finis|fini|fin|fi|f)
- shift; set dummy --mode finish ${1+"$@"}; shift
- ;;
- install|instal|insta|inst|ins|in|i)
- shift; set dummy --mode install ${1+"$@"}; shift
- ;;
- link|lin|li|l)
- shift; set dummy --mode link ${1+"$@"}; shift
- ;;
- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
- shift; set dummy --mode uninstall ${1+"$@"}; shift
- ;;
- esac
-
- # Parse non-mode specific arguments:
- while test "$#" -gt 0; do
- opt="$1"
- shift
-
- case $opt in
- --config) func_config ;;
-
- --debug) preserve_args="$preserve_args $opt"
- func_echo "enabling shell trace mode"
- opt_debug='set -x'
- $opt_debug
- ;;
-
- -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break
- execute_dlfiles="$execute_dlfiles $1"
- shift
- ;;
-
- --dry-run | -n) opt_dry_run=: ;;
- --features) func_features ;;
- --finish) mode="finish" ;;
-
- --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break
- case $1 in
- # Valid mode arguments:
- clean) ;;
- compile) ;;
- execute) ;;
- finish) ;;
- install) ;;
- link) ;;
- relink) ;;
- uninstall) ;;
-
- # Catch anything else as an error
- *) func_error "invalid argument for $opt"
- exit_cmd=exit
- break
- ;;
- esac
-
- mode="$1"
- shift
- ;;
-
- --preserve-dup-deps)
- opt_duplicate_deps=: ;;
-
- --quiet|--silent) preserve_args="$preserve_args $opt"
- opt_silent=:
- ;;
-
- --verbose| -v) preserve_args="$preserve_args $opt"
- opt_silent=false
- ;;
-
- --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break
- preserve_args="$preserve_args $opt $1"
- func_enable_tag "$1" # tagname is set here
- shift
- ;;
-
- # Separate optargs to long options:
- -dlopen=*|--mode=*|--tag=*)
- func_opt_split "$opt"
- set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"}
- shift
- ;;
-
- -\?|-h) func_usage ;;
- --help) opt_help=: ;;
- --version) func_version ;;
-
- -*) func_fatal_help "unrecognized option \`$opt'" ;;
-
- *) nonopt="$opt"
- break
- ;;
- esac
- done
-
-
- case $host in
- *cygwin* | *mingw* | *pw32* | *cegcc*)
- # don't eliminate duplications in $postdeps and $predeps
- opt_duplicate_compiler_generated_deps=:
- ;;
- *)
- opt_duplicate_compiler_generated_deps=$opt_duplicate_deps
- ;;
- esac
-
- # Having warned about all mis-specified options, bail out if
- # anything was wrong.
- $exit_cmd $EXIT_FAILURE
-}
-
-# func_check_version_match
-# Ensure that we are using m4 macros, and libtool script from the same
-# release of libtool.
-func_check_version_match ()
-{
- if test "$package_revision" != "$macro_revision"; then
- if test "$VERSION" != "$macro_version"; then
- if test -z "$macro_version"; then
- cat >&2 <<_LT_EOF
-$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
-$progname: definition of this LT_INIT comes from an older release.
-$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
-$progname: and run autoconf again.
-_LT_EOF
- else
- cat >&2 <<_LT_EOF
-$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
-$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
-$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
-$progname: and run autoconf again.
-_LT_EOF
- fi
- else
- cat >&2 <<_LT_EOF
-$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
-$progname: but the definition of this LT_INIT comes from revision $macro_revision.
-$progname: You should recreate aclocal.m4 with macros from revision $package_revision
-$progname: of $PACKAGE $VERSION and run autoconf again.
-_LT_EOF
- fi
-
- exit $EXIT_MISMATCH
- fi
-}
-
-
-## ----------- ##
-## Main. ##
-## ----------- ##
-
-$opt_help || {
- # Sanity checks first:
- func_check_version_match
-
- if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
- func_fatal_configuration "not configured to build any kind of library"
- fi
-
- test -z "$mode" && func_fatal_error "error: you must specify a MODE."
-
-
- # Darwin sucks
- eval std_shrext=\"$shrext_cmds\"
-
-
- # Only execute mode is allowed to have -dlopen flags.
- if test -n "$execute_dlfiles" && test "$mode" != execute; then
- func_error "unrecognized option \`-dlopen'"
- $ECHO "$help" 1>&2
- exit $EXIT_FAILURE
- fi
-
- # Change the help message to a mode-specific one.
- generic_help="$help"
- help="Try \`$progname --help --mode=$mode' for more information."
-}
-
-
-# func_lalib_p file
-# True iff FILE is a libtool `.la' library or `.lo' object file.
-# This function is only a basic sanity check; it will hardly flush out
-# determined imposters.
-func_lalib_p ()
-{
- test -f "$1" &&
- $SED -e 4q "$1" 2>/dev/null \
- | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
-}
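-# Illustrative call (a sketch; the file name is hypothetical): succeeds only
-# if the first four lines carry the "Generated by" marker checked above:
-#   func_lalib_p "libfoo.la" && echo "libfoo.la looks like a libtool archive"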
-
-# func_lalib_unsafe_p file
-# True iff FILE is a libtool `.la' library or `.lo' object file.
-# This function implements the same check as func_lalib_p without
-# resorting to external programs. To this end, it redirects stdin and
-# closes it afterwards, without saving the original file descriptor.
-# As a safety measure, use it only where a negative result would be
-# fatal anyway. Works if `file' does not exist.
-func_lalib_unsafe_p ()
-{
- lalib_p=no
- if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
- for lalib_p_l in 1 2 3 4
- do
- read lalib_p_line
- case "$lalib_p_line" in
- \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
- esac
- done
- exec 0<&5 5<&-
- fi
- test "$lalib_p" = yes
-}
-
-# func_ltwrapper_script_p file
-# True iff FILE is a libtool wrapper script
-# This function is only a basic sanity check; it will hardly flush out
-# determined imposters.
-func_ltwrapper_script_p ()
-{
- func_lalib_p "$1"
-}
-
-# func_ltwrapper_executable_p file
-# True iff FILE is a libtool wrapper executable
-# This function is only a basic sanity check; it will hardly flush out
-# determined imposters.
-func_ltwrapper_executable_p ()
-{
- func_ltwrapper_exec_suffix=
- case $1 in
- *.exe) ;;
- *) func_ltwrapper_exec_suffix=.exe ;;
- esac
- $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
-}
-
-# func_ltwrapper_scriptname file
-# Assumes file is an ltwrapper_executable
-# uses $file to determine the appropriate filename for a
-# temporary ltwrapper_script.
-func_ltwrapper_scriptname ()
-{
- func_ltwrapper_scriptname_result=""
- if func_ltwrapper_executable_p "$1"; then
- func_dirname_and_basename "$1" "" "."
- func_stripname '' '.exe' "$func_basename_result"
- func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
- fi
-}
-
-# func_ltwrapper_p file
-# True iff FILE is a libtool wrapper script or wrapper executable
-# This function is only a basic sanity check; it will hardly flush out
-# determined imposters.
-func_ltwrapper_p ()
-{
- func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
-}
-
-
-# func_execute_cmds commands fail_cmd
-# Execute tilde-delimited COMMANDS.
-# If FAIL_CMD is given, eval that upon failure.
-# FAIL_CMD may read-access the current command in variable CMD!
-func_execute_cmds ()
-{
- $opt_debug
- save_ifs=$IFS; IFS='~'
- for cmd in $1; do
- IFS=$save_ifs
- eval cmd=\"$cmd\"
- func_show_eval "$cmd" "${2-:}"
- done
- IFS=$save_ifs
-}
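-# Illustrative call (a sketch; the commands are hypothetical): run two
-# tilde-separated commands, stopping at the first failure:
-#   func_execute_cmds '$RANLIB libfoo.a~chmod 644 libfoo.a' 'exit $EXIT_FAILURE'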
-
-
-# func_source file
-# Source FILE, adding directory component if necessary.
-# Note that it is not necessary on cygwin/mingw to append a dot to
-# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
-# behavior happens only for exec(3), not for open(2)! Also, sourcing
-# `FILE.' does not work on cygwin managed mounts.
-func_source ()
-{
- $opt_debug
- case $1 in
- */* | *\\*) . "$1" ;;
- *) . "./$1" ;;
- esac
-}
-
-
-# func_infer_tag arg
-# Infer tagged configuration to use if any are available and
-# if one wasn't chosen via the "--tag" command line option.
-# Only attempt this if the compiler in the base compile
-# command doesn't match the default compiler.
-# arg is usually of the form 'gcc ...'
-func_infer_tag ()
-{
- $opt_debug
- if test -n "$available_tags" && test -z "$tagname"; then
- CC_quoted=
- for arg in $CC; do
- func_quote_for_eval "$arg"
- CC_quoted="$CC_quoted $func_quote_for_eval_result"
- done
- case $@ in
- # Blanks in the command may have been stripped by the calling shell,
- # but not from the CC environment variable when configure was run.
- " $CC "* | "$CC "* | " `$ECHO $CC` "* | "`$ECHO $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$ECHO $CC_quoted` "* | "`$ECHO $CC_quoted` "*) ;;
- # Blanks at the start of $base_compile will cause this to fail
- # if we don't check for them as well.
- *)
- for z in $available_tags; do
- if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
- # Evaluate the configuration.
- eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
- CC_quoted=
- for arg in $CC; do
- # Double-quote args containing other shell metacharacters.
- func_quote_for_eval "$arg"
- CC_quoted="$CC_quoted $func_quote_for_eval_result"
- done
- case "$@ " in
- " $CC "* | "$CC "* | " `$ECHO $CC` "* | "`$ECHO $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$ECHO $CC_quoted` "* | "`$ECHO $CC_quoted` "*)
- # The compiler in the base compile command matches
- # the one in the tagged configuration.
- # Assume this is the tagged configuration we want.
- tagname=$z
- break
- ;;
- esac
- fi
- done
- # If $tagname still isn't set, then no tagged configuration
-      # was found, so let the user know that the "--tag" command
- # line option must be used.
- if test -z "$tagname"; then
- func_echo "unable to infer tagged configuration"
- func_fatal_error "specify a tag with \`--tag'"
-# else
-# func_verbose "using $tagname tagged configuration"
- fi
- ;;
- esac
- fi
-}
-
-
-
-# func_write_libtool_object output_name pic_name nonpic_name
-# Create a libtool object file (analogous to a ".la" file),
-# but don't create it if we're doing a dry run.
-func_write_libtool_object ()
-{
- write_libobj=${1}
- if test "$build_libtool_libs" = yes; then
- write_lobj=\'${2}\'
- else
- write_lobj=none
- fi
-
- if test "$build_old_libs" = yes; then
- write_oldobj=\'${3}\'
- else
- write_oldobj=none
- fi
-
- $opt_dry_run || {
- cat >${write_libobj}T <<EOF
-# $write_libobj - a libtool object file
-# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
-#
-# Please DO NOT delete this file!
-# It is necessary for linking the library.
-
-# Name of the PIC object.
-pic_object=$write_lobj
-
-# Name of the non-PIC object
-non_pic_object=$write_oldobj
-
-EOF
- $MV "${write_libobj}T" "${write_libobj}"
- }
-}
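-# Illustrative call (a sketch; the object names are hypothetical): record
-# both the PIC and non-PIC objects that make up foo.lo:
-#   func_write_libtool_object "foo.lo" ".libs/foo.o" "foo.o"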
-
-# func_mode_compile arg...
-func_mode_compile ()
-{
- $opt_debug
- # Get the compilation command and the source file.
- base_compile=
- srcfile="$nonopt" # always keep a non-empty value in "srcfile"
- suppress_opt=yes
- suppress_output=
- arg_mode=normal
- libobj=
- later=
- pie_flag=
-
- for arg
- do
- case $arg_mode in
- arg )
- # do not "continue". Instead, add this to base_compile
- lastarg="$arg"
- arg_mode=normal
- ;;
-
- target )
- libobj="$arg"
- arg_mode=normal
- continue
- ;;
-
- normal )
- # Accept any command-line options.
- case $arg in
- -o)
- test -n "$libobj" && \
- func_fatal_error "you cannot specify \`-o' more than once"
- arg_mode=target
- continue
- ;;
-
- -pie | -fpie | -fPIE)
- pie_flag="$pie_flag $arg"
- continue
- ;;
-
- -shared | -static | -prefer-pic | -prefer-non-pic)
- later="$later $arg"
- continue
- ;;
-
- -no-suppress)
- suppress_opt=no
- continue
- ;;
-
- -Xcompiler)
- arg_mode=arg # the next one goes into the "base_compile" arg list
- continue # The current "srcfile" will either be retained or
- ;; # replaced later. I would guess that would be a bug.
-
- -Wc,*)
- func_stripname '-Wc,' '' "$arg"
- args=$func_stripname_result
- lastarg=
- save_ifs="$IFS"; IFS=','
- for arg in $args; do
- IFS="$save_ifs"
- func_quote_for_eval "$arg"
- lastarg="$lastarg $func_quote_for_eval_result"
- done
- IFS="$save_ifs"
- func_stripname ' ' '' "$lastarg"
- lastarg=$func_stripname_result
-
- # Add the arguments to base_compile.
- base_compile="$base_compile $lastarg"
- continue
- ;;
-
- *)
- # Accept the current argument as the source file.
- # The previous "srcfile" becomes the current argument.
- #
- lastarg="$srcfile"
- srcfile="$arg"
- ;;
- esac # case $arg
- ;;
- esac # case $arg_mode
-
- # Aesthetically quote the previous argument.
- func_quote_for_eval "$lastarg"
- base_compile="$base_compile $func_quote_for_eval_result"
- done # for arg
-
- case $arg_mode in
- arg)
- func_fatal_error "you must specify an argument for -Xcompile"
- ;;
- target)
- func_fatal_error "you must specify a target with \`-o'"
- ;;
- *)
- # Get the name of the library object.
- test -z "$libobj" && {
- func_basename "$srcfile"
- libobj="$func_basename_result"
- }
- ;;
- esac
-
- # Recognize several different file suffixes.
- # If the user specifies -o file.o, it is replaced with file.lo
- case $libobj in
- *.[cCFSifmso] | \
- *.ada | *.adb | *.ads | *.asm | \
- *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
- *.[fF][09]? | *.for | *.java | *.obj | *.sx)
- func_xform "$libobj"
- libobj=$func_xform_result
- ;;
- esac
-
- case $libobj in
- *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
- *)
- func_fatal_error "cannot determine name of library object from \`$libobj'"
- ;;
- esac
-
- func_infer_tag $base_compile
-
- for arg in $later; do
- case $arg in
- -shared)
- test "$build_libtool_libs" != yes && \
- func_fatal_configuration "can not build a shared library"
- build_old_libs=no
- continue
- ;;
-
- -static)
- build_libtool_libs=no
- build_old_libs=yes
- continue
- ;;
-
- -prefer-pic)
- pic_mode=yes
- continue
- ;;
-
- -prefer-non-pic)
- pic_mode=no
- continue
- ;;
- esac
- done
-
- func_quote_for_eval "$libobj"
- test "X$libobj" != "X$func_quote_for_eval_result" \
- && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \
- && func_warning "libobj name \`$libobj' may not contain shell special characters."
- func_dirname_and_basename "$obj" "/" ""
- objname="$func_basename_result"
- xdir="$func_dirname_result"
- lobj=${xdir}$objdir/$objname
-
- test -z "$base_compile" && \
- func_fatal_help "you must specify a compilation command"
-
- # Delete any leftover library objects.
- if test "$build_old_libs" = yes; then
- removelist="$obj $lobj $libobj ${libobj}T"
- else
- removelist="$lobj $libobj ${libobj}T"
- fi
-
- # On Cygwin there's no "real" PIC flag so we must build both object types
- case $host_os in
- cygwin* | mingw* | pw32* | os2* | cegcc*)
- pic_mode=default
- ;;
- esac
- if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
- # non-PIC code in shared libraries is not supported
- pic_mode=default
- fi
-
- # Calculate the filename of the output object if compiler does
- # not support -o with -c
- if test "$compiler_c_o" = no; then
- output_obj=`$ECHO "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext}
- lockfile="$output_obj.lock"
- else
- output_obj=
- need_locks=no
- lockfile=
- fi
-
- # Lock this critical section if it is needed
-  # We use this script file to make the link; it avoids creating a new file
- if test "$need_locks" = yes; then
- until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
- func_echo "Waiting for $lockfile to be removed"
- sleep 2
- done
- elif test "$need_locks" = warn; then
- if test -f "$lockfile"; then
- $ECHO "\
-*** ERROR, $lockfile exists and contains:
-`cat $lockfile 2>/dev/null`
-
-This indicates that another process is trying to use the same
-temporary object file, and libtool could not work around it because
-your compiler does not support \`-c' and \`-o' together. If you
-repeat this compilation, it may succeed, by chance, but you had better
-avoid parallel builds (make -j) on this platform, or get a better
-compiler."
-
- $opt_dry_run || $RM $removelist
- exit $EXIT_FAILURE
- fi
- removelist="$removelist $output_obj"
- $ECHO "$srcfile" > "$lockfile"
- fi
-
- $opt_dry_run || $RM $removelist
- removelist="$removelist $lockfile"
- trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
-
- if test -n "$fix_srcfile_path"; then
- eval srcfile=\"$fix_srcfile_path\"
- fi
- func_quote_for_eval "$srcfile"
- qsrcfile=$func_quote_for_eval_result
-
- # Only build a PIC object if we are building libtool libraries.
- if test "$build_libtool_libs" = yes; then
- # Without this assignment, base_compile gets emptied.
- fbsd_hideous_sh_bug=$base_compile
-
- if test "$pic_mode" != no; then
- command="$base_compile $qsrcfile $pic_flag"
- else
- # Don't build PIC code
- command="$base_compile $qsrcfile"
- fi
-
- func_mkdir_p "$xdir$objdir"
-
- if test -z "$output_obj"; then
- # Place PIC objects in $objdir
- command="$command -o $lobj"
- fi
-
- func_show_eval_locale "$command" \
- 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
-
- if test "$need_locks" = warn &&
- test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
- $ECHO "\
-*** ERROR, $lockfile contains:
-`cat $lockfile 2>/dev/null`
-
-but it should contain:
-$srcfile
-
-This indicates that another process is trying to use the same
-temporary object file, and libtool could not work around it because
-your compiler does not support \`-c' and \`-o' together. If you
-repeat this compilation, it may succeed, by chance, but you had better
-avoid parallel builds (make -j) on this platform, or get a better
-compiler."
-
- $opt_dry_run || $RM $removelist
- exit $EXIT_FAILURE
- fi
-
- # Just move the object if needed, then go on to compile the next one
- if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
- func_show_eval '$MV "$output_obj" "$lobj"' \
- 'error=$?; $opt_dry_run || $RM $removelist; exit $error'
- fi
-
- # Allow error messages only from the first compilation.
- if test "$suppress_opt" = yes; then
- suppress_output=' >/dev/null 2>&1'
- fi
- fi
-
- # Only build a position-dependent object if we build old libraries.
- if test "$build_old_libs" = yes; then
- if test "$pic_mode" != yes; then
- # Don't build PIC code
- command="$base_compile $qsrcfile$pie_flag"
- else
- command="$base_compile $qsrcfile $pic_flag"
- fi
- if test "$compiler_c_o" = yes; then
- command="$command -o $obj"
- fi
-
- # Suppress compiler output if we already did a PIC compilation.
- command="$command$suppress_output"
- func_show_eval_locale "$command" \
- '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
-
- if test "$need_locks" = warn &&
- test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
- $ECHO "\
-*** ERROR, $lockfile contains:
-`cat $lockfile 2>/dev/null`
-
-but it should contain:
-$srcfile
-
-This indicates that another process is trying to use the same
-temporary object file, and libtool could not work around it because
-your compiler does not support \`-c' and \`-o' together. If you
-repeat this compilation, it may succeed, by chance, but you had better
-avoid parallel builds (make -j) on this platform, or get a better
-compiler."
-
- $opt_dry_run || $RM $removelist
- exit $EXIT_FAILURE
- fi
-
- # Just move the object if needed
- if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
- func_show_eval '$MV "$output_obj" "$obj"' \
- 'error=$?; $opt_dry_run || $RM $removelist; exit $error'
- fi
- fi
-
- $opt_dry_run || {
- func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
-
- # Unlock the critical section if it was locked
- if test "$need_locks" != no; then
- removelist=$lockfile
- $RM "$lockfile"
- fi
- }
-
- exit $EXIT_SUCCESS
-}
-
-$opt_help || {
-test "$mode" = compile && func_mode_compile ${1+"$@"}
-}
-
-func_mode_help ()
-{
- # We need to display help for each of the modes.
- case $mode in
- "")
- # Generic help is extracted from the usage comments
- # at the start of this file.
- func_help
- ;;
-
- clean)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
-
-Remove files from the build directory.
-
-RM is the name of the program to use to delete files associated with each FILE
-(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
-to RM.
-
-If FILE is a libtool library, object or program, all the files associated
-with it are deleted. Otherwise, only FILE itself is deleted using RM."
- ;;
-
- compile)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
-
-Compile a source file into a libtool library object.
-
-This mode accepts the following additional options:
-
- -o OUTPUT-FILE set the output file name to OUTPUT-FILE
- -no-suppress do not suppress compiler output for multiple passes
-  -prefer-pic       try to build PIC objects only
-  -prefer-non-pic   try to build non-PIC objects only
- -shared do not build a \`.o' file suitable for static linking
- -static only build a \`.o' file suitable for static linking
-
-COMPILE-COMMAND is a command to be used in creating a \`standard' object file
-from the given SOURCEFILE.
-
-The output file name is determined by removing the directory component from
-SOURCEFILE, then substituting the C source code suffix \`.c' with the
-library object suffix, \`.lo'."
- ;;
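-# Illustrative compile-mode invocation (a sketch mirroring the help text
-# above; the compiler and file names are examples only):
-#   libtool --mode=compile gcc -g -O -c foo.c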
-
- execute)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
-
-Automatically set library path, then run a program.
-
-This mode accepts the following additional options:
-
- -dlopen FILE add the directory containing FILE to the library path
-
-This mode sets the library path environment variable according to \`-dlopen'
-flags.
-
-If any of the ARGS are libtool executable wrappers, then they are translated
-into their corresponding uninstalled binary, and any of their required library
-directories are added to the library path.
-
-Then, COMMAND is executed, with ARGS as arguments."
- ;;
-
- finish)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
-
-Complete the installation of libtool libraries.
-
-Each LIBDIR is a directory that contains libtool libraries.
-
-The commands that this mode executes may require superuser privileges. Use
-the \`--dry-run' option if you just want to see what would be executed."
- ;;
-
- install)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
-
-Install executables or libraries.
-
-INSTALL-COMMAND is the installation command. The first component should be
-either the \`install' or \`cp' program.
-
-The following components of INSTALL-COMMAND are treated specially:
-
- -inst-prefix PREFIX-DIR Use PREFIX-DIR as a staging area for installation
-
-The rest of the components are interpreted as arguments to that command (only
-BSD-compatible install options are recognized)."
- ;;
-
- link)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
-
-Link object files or libraries together to form another library, or to
-create an executable program.
-
-LINK-COMMAND is a command using the C compiler that you would use to create
-a program from several object files.
-
-The following components of LINK-COMMAND are treated specially:
-
- -all-static do not do any dynamic linking at all
- -avoid-version do not add a version suffix if possible
- -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime
- -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
- -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
- -export-symbols SYMFILE
- try to export only the symbols listed in SYMFILE
- -export-symbols-regex REGEX
- try to export only the symbols matching REGEX
- -LLIBDIR search LIBDIR for required installed libraries
- -lNAME OUTPUT-FILE requires the installed library libNAME
-  -module           build a library that can be dlopened
- -no-fast-install disable the fast-install mode
- -no-install link a not-installable executable
- -no-undefined declare that a library does not refer to external symbols
- -o OUTPUT-FILE create OUTPUT-FILE from the specified objects
- -objectlist FILE Use a list of object files found in FILE to specify objects
- -precious-files-regex REGEX
- don't remove output files matching REGEX
- -release RELEASE specify package release information
- -rpath LIBDIR the created library will eventually be installed in LIBDIR
- -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
- -shared only do dynamic linking of libtool libraries
- -shrext SUFFIX override the standard shared library file extension
- -static do not do any dynamic linking of uninstalled libtool libraries
- -static-libtool-libs
- do not do any dynamic linking of libtool libraries
- -version-info CURRENT[:REVISION[:AGE]]
- specify library version info [each variable defaults to 0]
- -weak LIBNAME declare that the target provides the LIBNAME interface
-
-All other options (arguments beginning with \`-') are ignored.
-
-Every other argument is treated as a filename. Files ending in \`.la' are
-treated as uninstalled libtool libraries, other files are standard or library
-object files.
-
-If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
-only library objects (\`.lo' files) may be specified, and \`-rpath' is
-required, except when creating a convenience library.
-
-If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
-using \`ar' and \`ranlib', or on Windows using \`lib'.
-
-If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
-is created, otherwise an executable program is created."
- ;;
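-# Illustrative link-mode invocation (a sketch mirroring the help text above;
-# the names and the -rpath value are examples only):
-#   libtool --mode=link gcc -g -O -o libfoo.la foo.lo -rpath /usr/local/lib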
-
- uninstall)
- $ECHO \
-"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
-
-Remove libraries from an installation directory.
-
-RM is the name of the program to use to delete files associated with each FILE
-(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
-to RM.
-
-If FILE is a libtool library, all the files associated with it are deleted.
-Otherwise, only FILE itself is deleted using RM."
- ;;
-
- *)
- func_fatal_help "invalid operation mode \`$mode'"
- ;;
- esac
-
- $ECHO
- $ECHO "Try \`$progname --help' for more information about other modes."
-
- exit $?
-}
-
- # Now that we've collected a possible --mode arg, show help if necessary
- $opt_help && func_mode_help
-
-
-# func_mode_execute arg...
-func_mode_execute ()
-{
- $opt_debug
- # The first argument is the command name.
- cmd="$nonopt"
- test -z "$cmd" && \
- func_fatal_help "you must specify a COMMAND"
-
- # Handle -dlopen flags immediately.
- for file in $execute_dlfiles; do
- test -f "$file" \
- || func_fatal_help "\`$file' is not a file"
-
- dir=
- case $file in
- *.la)
- # Check to see that this really is a libtool archive.
- func_lalib_unsafe_p "$file" \
- || func_fatal_help "\`$lib' is not a valid libtool archive"
-
- # Read the libtool library.
- dlname=
- library_names=
- func_source "$file"
-
- # Skip this library if it cannot be dlopened.
- if test -z "$dlname"; then
- # Warn if it was a shared library.
- test -n "$library_names" && \
- func_warning "\`$file' was not linked with \`-export-dynamic'"
- continue
- fi
-
- func_dirname "$file" "" "."
- dir="$func_dirname_result"
-
- if test -f "$dir/$objdir/$dlname"; then
- dir="$dir/$objdir"
- else
- if test ! -f "$dir/$dlname"; then
- func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
- fi
- fi
- ;;
-
- *.lo)
- # Just add the directory containing the .lo file.
- func_dirname "$file" "" "."
- dir="$func_dirname_result"
- ;;
-
- *)
- func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
- continue
- ;;
- esac
-
- # Get the absolute pathname.
- absdir=`cd "$dir" && pwd`
- test -n "$absdir" && dir="$absdir"
-
- # Now add the directory to shlibpath_var.
- if eval "test -z \"\$$shlibpath_var\""; then
- eval "$shlibpath_var=\"\$dir\""
- else
- eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
- fi
- done
-
- # This variable tells wrapper scripts just to set shlibpath_var
- # rather than running their programs.
- libtool_execute_magic="$magic"
-
- # Check if any of the arguments is a wrapper script.
- args=
- for file
- do
- case $file in
- -*) ;;
- *)
- # Do a test to see if this is really a libtool program.
- if func_ltwrapper_script_p "$file"; then
- func_source "$file"
- # Transform arg to wrapped name.
- file="$progdir/$program"
- elif func_ltwrapper_executable_p "$file"; then
- func_ltwrapper_scriptname "$file"
- func_source "$func_ltwrapper_scriptname_result"
- # Transform arg to wrapped name.
- file="$progdir/$program"
- fi
- ;;
- esac
- # Quote arguments (to preserve shell metacharacters).
- func_quote_for_eval "$file"
- args="$args $func_quote_for_eval_result"
- done
-
- if test "X$opt_dry_run" = Xfalse; then
- if test -n "$shlibpath_var"; then
- # Export the shlibpath_var.
- eval "export $shlibpath_var"
- fi
-
- # Restore saved environment variables
- for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
- do
- eval "if test \"\${save_$lt_var+set}\" = set; then
- $lt_var=\$save_$lt_var; export $lt_var
- else
- $lt_unset $lt_var
- fi"
- done
-
- # Now prepare to actually exec the command.
- exec_cmd="\$cmd$args"
- else
- # Display what would be done.
- if test -n "$shlibpath_var"; then
- eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
- $ECHO "export $shlibpath_var"
- fi
- $ECHO "$cmd$args"
- exit $EXIT_SUCCESS
- fi
-}
-
-test "$mode" = execute && func_mode_execute ${1+"$@"}
-
-
-# func_mode_finish arg...
-func_mode_finish ()
-{
- $opt_debug
- libdirs="$nonopt"
- admincmds=
-
- if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
- for dir
- do
- libdirs="$libdirs $dir"
- done
-
- for libdir in $libdirs; do
- if test -n "$finish_cmds"; then
- # Do each command in the finish commands.
- func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
-'"$cmd"'"'
- fi
- if test -n "$finish_eval"; then
- # Do the single finish_eval.
- eval cmds=\"$finish_eval\"
- $opt_dry_run || eval "$cmds" || admincmds="$admincmds
- $cmds"
- fi
- done
- fi
-
- # Exit here if they wanted silent mode.
- $opt_silent && exit $EXIT_SUCCESS
-
- $ECHO "X----------------------------------------------------------------------" | $Xsed
- $ECHO "Libraries have been installed in:"
- for libdir in $libdirs; do
- $ECHO " $libdir"
- done
- $ECHO
- $ECHO "If you ever happen to want to link against installed libraries"
- $ECHO "in a given directory, LIBDIR, you must either use libtool, and"
- $ECHO "specify the full pathname of the library, or use the \`-LLIBDIR'"
- $ECHO "flag during linking and do at least one of the following:"
- if test -n "$shlibpath_var"; then
- $ECHO " - add LIBDIR to the \`$shlibpath_var' environment variable"
- $ECHO " during execution"
- fi
- if test -n "$runpath_var"; then
- $ECHO " - add LIBDIR to the \`$runpath_var' environment variable"
- $ECHO " during linking"
- fi
- if test -n "$hardcode_libdir_flag_spec"; then
- libdir=LIBDIR
- eval flag=\"$hardcode_libdir_flag_spec\"
-
- $ECHO " - use the \`$flag' linker flag"
- fi
- if test -n "$admincmds"; then
- $ECHO " - have your system administrator run these commands:$admincmds"
- fi
- if test -f /etc/ld.so.conf; then
- $ECHO " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
- fi
- $ECHO
-
- $ECHO "See any operating system documentation about shared libraries for"
- case $host in
- solaris2.[6789]|solaris2.1[0-9])
- $ECHO "more information, such as the ld(1), crle(1) and ld.so(8) manual"
- $ECHO "pages."
- ;;
- *)
- $ECHO "more information, such as the ld(1) and ld.so(8) manual pages."
- ;;
- esac
- $ECHO "X----------------------------------------------------------------------" | $Xsed
- exit $EXIT_SUCCESS
-}
-
-test "$mode" = finish && func_mode_finish ${1+"$@"}
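-
-# Illustrative usage (not part of the original script): finish mode completes
-# the installation of libraries into the given directories, for example
-#
-#   ./libtool --mode=finish /usr/local/lib
-#
-# which runs the configured $finish_cmds for each listed libdir and then
-# prints the "Libraries have been installed in:" notice above.  The
-# directory /usr/local/lib is only an example.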
-
-
-# func_mode_install arg...
-func_mode_install ()
-{
- $opt_debug
- # There may be an optional sh(1) argument at the beginning of
- # install_prog (especially on Windows NT).
- if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
- # Allow the use of GNU shtool's install command.
- $ECHO "X$nonopt" | $GREP shtool >/dev/null; then
- # Aesthetically quote it.
- func_quote_for_eval "$nonopt"
- install_prog="$func_quote_for_eval_result "
- arg=$1
- shift
- else
- install_prog=
- arg=$nonopt
- fi
-
- # The real first argument should be the name of the installation program.
- # Aesthetically quote it.
- func_quote_for_eval "$arg"
- install_prog="$install_prog$func_quote_for_eval_result"
-
- # We need to accept at least all the BSD install flags.
- dest=
- files=
- opts=
- prev=
- install_type=
- isdir=no
- stripme=
- for arg
- do
- if test -n "$dest"; then
- files="$files $dest"
- dest=$arg
- continue
- fi
-
- case $arg in
- -d) isdir=yes ;;
- -f)
- case " $install_prog " in
- *[\\\ /]cp\ *) ;;
- *) prev=$arg ;;
- esac
- ;;
- -g | -m | -o)
- prev=$arg
- ;;
- -s)
- stripme=" -s"
- continue
- ;;
- -*)
- ;;
- *)
- # If the previous option needed an argument, then skip it.
- if test -n "$prev"; then
- prev=
- else
- dest=$arg
- continue
- fi
- ;;
- esac
-
- # Aesthetically quote the argument.
- func_quote_for_eval "$arg"
- install_prog="$install_prog $func_quote_for_eval_result"
- done
-
- test -z "$install_prog" && \
- func_fatal_help "you must specify an install program"
-
- test -n "$prev" && \
- func_fatal_help "the \`$prev' option requires an argument"
-
- if test -z "$files"; then
- if test -z "$dest"; then
- func_fatal_help "no file or destination specified"
- else
- func_fatal_help "you must specify a destination"
- fi
- fi
-
- # Strip any trailing slash from the destination.
- func_stripname '' '/' "$dest"
- dest=$func_stripname_result
-
- # Check to see that the destination is a directory.
- test -d "$dest" && isdir=yes
- if test "$isdir" = yes; then
- destdir="$dest"
- destname=
- else
- func_dirname_and_basename "$dest" "" "."
- destdir="$func_dirname_result"
- destname="$func_basename_result"
-
- # Not a directory, so check to see that there is only one file specified.
- set dummy $files; shift
- test "$#" -gt 1 && \
- func_fatal_help "\`$dest' is not a directory"
- fi
- case $destdir in
- [\\/]* | [A-Za-z]:[\\/]*) ;;
- *)
- for file in $files; do
- case $file in
- *.lo) ;;
- *)
- func_fatal_help "\`$destdir' must be an absolute directory name"
- ;;
- esac
- done
- ;;
- esac
-
- # This variable tells wrapper scripts just to set variables rather
- # than running their programs.
- libtool_install_magic="$magic"
-
- staticlibs=
- future_libdirs=
- current_libdirs=
- for file in $files; do
-
- # Do each installation.
- case $file in
- *.$libext)
- # Do the static libraries later.
- staticlibs="$staticlibs $file"
- ;;
-
- *.la)
- # Check to see that this really is a libtool archive.
- func_lalib_unsafe_p "$file" \
- || func_fatal_help "\`$file' is not a valid libtool archive"
-
- library_names=
- old_library=
- relink_command=
- func_source "$file"
-
- # Add the libdir to current_libdirs if it is the destination.
- if test "X$destdir" = "X$libdir"; then
- case "$current_libdirs " in
- *" $libdir "*) ;;
- *) current_libdirs="$current_libdirs $libdir" ;;
- esac
- else
- # Note the libdir as a future libdir.
- case "$future_libdirs " in
- *" $libdir "*) ;;
- *) future_libdirs="$future_libdirs $libdir" ;;
- esac
- fi
-
- func_dirname "$file" "/" ""
- dir="$func_dirname_result"
- dir="$dir$objdir"
-
- if test -n "$relink_command"; then
- # Determine the prefix the user has applied to our future dir.
- inst_prefix_dir=`$ECHO "X$destdir" | $Xsed -e "s%$libdir\$%%"`
-
- # Don't allow the user to place us outside of our expected
-      # location because this prevents finding dependent libraries that
-      # are installed to the same prefix.
-      # At present, this check doesn't affect Windows .dll's that
- # are installed into $libdir/../bin (currently, that works fine)
- # but it's something to keep an eye on.
- test "$inst_prefix_dir" = "$destdir" && \
- func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
-
- if test -n "$inst_prefix_dir"; then
- # Stick the inst_prefix_dir data into the link command.
- relink_command=`$ECHO "X$relink_command" | $Xsed -e "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
- else
- relink_command=`$ECHO "X$relink_command" | $Xsed -e "s%@inst_prefix_dir@%%"`
- fi
-
- func_warning "relinking \`$file'"
- func_show_eval "$relink_command" \
- 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
- fi
-
- # See the names of the shared library.
- set dummy $library_names; shift
- if test -n "$1"; then
- realname="$1"
- shift
-
- srcname="$realname"
- test -n "$relink_command" && srcname="$realname"T
-
- # Install the shared library and build the symlinks.
- func_show_eval "$install_prog $dir/$srcname $destdir/$realname" \
- 'exit $?'
- tstripme="$stripme"
- case $host_os in
- cygwin* | mingw* | pw32* | cegcc*)
- case $realname in
- *.dll.a)
- tstripme=""
- ;;
- esac
- ;;
- esac
- if test -n "$tstripme" && test -n "$striplib"; then
- func_show_eval "$striplib $destdir/$realname" 'exit $?'
- fi
-
- if test "$#" -gt 0; then
- # Delete the old symlinks, and create new ones.
- # Try `ln -sf' first, because the `ln' binary might depend on
- # the symlink we replace! Solaris /bin/ln does not understand -f,
- # so we also need to try rm && ln -s.
- for linkname
- do
- test "$linkname" != "$realname" \
- && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
- done
- fi
-
- # Do each command in the postinstall commands.
- lib="$destdir/$realname"
- func_execute_cmds "$postinstall_cmds" 'exit $?'
- fi
-
- # Install the pseudo-library for information purposes.
- func_basename "$file"
- name="$func_basename_result"
- instname="$dir/$name"i
- func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
-
- # Maybe install the static library, too.
- test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library"
- ;;
-
- *.lo)
- # Install (i.e. copy) a libtool object.
-
- # Figure out destination file name, if it wasn't already specified.
- if test -n "$destname"; then
- destfile="$destdir/$destname"
- else
- func_basename "$file"
- destfile="$func_basename_result"
- destfile="$destdir/$destfile"
- fi
-
- # Deduce the name of the destination old-style object file.
- case $destfile in
- *.lo)
- func_lo2o "$destfile"
- staticdest=$func_lo2o_result
- ;;
- *.$objext)
- staticdest="$destfile"
- destfile=
- ;;
- *)
- func_fatal_help "cannot copy a libtool object to \`$destfile'"
- ;;
- esac
-
- # Install the libtool object if requested.
- test -n "$destfile" && \
- func_show_eval "$install_prog $file $destfile" 'exit $?'
-
- # Install the old object if enabled.
- if test "$build_old_libs" = yes; then
- # Deduce the name of the old-style object file.
- func_lo2o "$file"
- staticobj=$func_lo2o_result
- func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
- fi
- exit $EXIT_SUCCESS
- ;;
-
- *)
- # Figure out destination file name, if it wasn't already specified.
- if test -n "$destname"; then
- destfile="$destdir/$destname"
- else
- func_basename "$file"
- destfile="$func_basename_result"
- destfile="$destdir/$destfile"
- fi
-
- # If the file is missing, and there is a .exe on the end, strip it
- # because it is most likely a libtool script we actually want to
- # install
- stripped_ext=""
- case $file in
- *.exe)
- if test ! -f "$file"; then
- func_stripname '' '.exe' "$file"
- file=$func_stripname_result
- stripped_ext=".exe"
- fi
- ;;
- esac
-
- # Do a test to see if this is really a libtool program.
- case $host in
- *cygwin* | *mingw*)
- if func_ltwrapper_executable_p "$file"; then
- func_ltwrapper_scriptname "$file"
- wrapper=$func_ltwrapper_scriptname_result
- else
- func_stripname '' '.exe' "$file"
- wrapper=$func_stripname_result
- fi
- ;;
- *)
- wrapper=$file
- ;;
- esac
- if func_ltwrapper_script_p "$wrapper"; then
- notinst_deplibs=
- relink_command=
-
- func_source "$wrapper"
-
- # Check the variables that should have been set.
- test -z "$generated_by_libtool_version" && \
- func_fatal_error "invalid libtool wrapper script \`$wrapper'"
-
- finalize=yes
- for lib in $notinst_deplibs; do
- # Check to see that each library is installed.
- libdir=
- if test -f "$lib"; then
- func_source "$lib"
- fi
- libfile="$libdir/"`$ECHO "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test
- if test -n "$libdir" && test ! -f "$libfile"; then
- func_warning "\`$lib' has not been installed in \`$libdir'"
- finalize=no
- fi
- done
-
- relink_command=
- func_source "$wrapper"
-
- outputname=
- if test "$fast_install" = no && test -n "$relink_command"; then
- $opt_dry_run || {
- if test "$finalize" = yes; then
- tmpdir=`func_mktempdir`
- func_basename "$file$stripped_ext"
- file="$func_basename_result"
- outputname="$tmpdir/$file"
- # Replace the output file specification.
- relink_command=`$ECHO "X$relink_command" | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g'`
-
- $opt_silent || {
- func_quote_for_expand "$relink_command"
- eval "func_echo $func_quote_for_expand_result"
- }
- if eval "$relink_command"; then :
- else
- func_error "error: relink \`$file' with the above command before installing it"
- $opt_dry_run || ${RM}r "$tmpdir"
- continue
- fi
- file="$outputname"
- else
- func_warning "cannot relink \`$file'"
- fi
- }
- else
- # Install the binary that we compiled earlier.
- file=`$ECHO "X$file$stripped_ext" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"`
- fi
- fi
-
- # remove .exe since cygwin /usr/bin/install will append another
- # one anyway
- case $install_prog,$host in
- */usr/bin/install*,*cygwin*)
- case $file:$destfile in
- *.exe:*.exe)
- # this is ok
- ;;
- *.exe:*)
- destfile=$destfile.exe
- ;;
- *:*.exe)
- func_stripname '' '.exe' "$destfile"
- destfile=$func_stripname_result
- ;;
- esac
- ;;
- esac
- func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
- $opt_dry_run || if test -n "$outputname"; then
- ${RM}r "$tmpdir"
- fi
- ;;
- esac
- done
-
- for file in $staticlibs; do
- func_basename "$file"
- name="$func_basename_result"
-
- # Set up the ranlib parameters.
- oldlib="$destdir/$name"
-
- func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
-
- if test -n "$stripme" && test -n "$old_striplib"; then
- func_show_eval "$old_striplib $oldlib" 'exit $?'
- fi
-
- # Do each command in the postinstall commands.
- func_execute_cmds "$old_postinstall_cmds" 'exit $?'
- done
-
- test -n "$future_libdirs" && \
- func_warning "remember to run \`$progname --finish$future_libdirs'"
-
- if test -n "$current_libdirs"; then
- # Maybe just do a dry run.
- $opt_dry_run && current_libdirs=" -n$current_libdirs"
- exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
- else
- exit $EXIT_SUCCESS
- fi
-}
-
-test "$mode" = install && func_mode_install ${1+"$@"}
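-
-# Illustrative usage (not part of the original script): install mode wraps an
-# ordinary install program, for example
-#
-#   ./libtool --mode=install /usr/bin/install -c libfoo.la /usr/local/lib/libfoo.la
-#
-# which installs the shared library, its symlinks, the .la file and,
-# optionally, the static library, and prints the `remember to run ...
-# --finish' reminder when needed.  The name libfoo.la is a placeholder.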
-
-
-# func_generate_dlsyms outputname originator pic_p
-# Extract symbols from dlprefiles and create ${outputname}S.o with
-# a dlpreopen symbol table.
-func_generate_dlsyms ()
-{
- $opt_debug
- my_outputname="$1"
- my_originator="$2"
- my_pic_p="${3-no}"
- my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
- my_dlsyms=
-
- if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
- if test -n "$NM" && test -n "$global_symbol_pipe"; then
- my_dlsyms="${my_outputname}S.c"
- else
- func_error "not configured to extract global symbols from dlpreopened files"
- fi
- fi
-
- if test -n "$my_dlsyms"; then
- case $my_dlsyms in
- "") ;;
- *.c)
- # Discover the nlist of each of the dlfiles.
- nlist="$output_objdir/${my_outputname}.nm"
-
- func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
-
- # Parse the name list into a source file.
- func_verbose "creating $output_objdir/$my_dlsyms"
-
- $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
-/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
-/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
-
-#ifdef __cplusplus
-extern \"C\" {
-#endif
-
-/* External symbol declarations for the compiler. */\
-"
-
- if test "$dlself" = yes; then
- func_verbose "generating symbol list for \`$output'"
-
- $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
-
- # Add our own program objects to the symbol list.
- progfiles=`$ECHO "X$objs$old_deplibs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
- for progfile in $progfiles; do
- func_verbose "extracting global C symbols from \`$progfile'"
- $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'"
- done
-
- if test -n "$exclude_expsyms"; then
- $opt_dry_run || {
- eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
- eval '$MV "$nlist"T "$nlist"'
- }
- fi
-
- if test -n "$export_symbols_regex"; then
- $opt_dry_run || {
- eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
- eval '$MV "$nlist"T "$nlist"'
- }
- fi
-
- # Prepare the list of exported symbols
- if test -z "$export_symbols"; then
- export_symbols="$output_objdir/$outputname.exp"
- $opt_dry_run || {
- $RM $export_symbols
- eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
- case $host in
- *cygwin* | *mingw* | *cegcc* )
- eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
- eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
- ;;
- esac
- }
- else
- $opt_dry_run || {
- eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
- eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
- eval '$MV "$nlist"T "$nlist"'
- case $host in
-          *cygwin* | *mingw* | *cegcc* )
- eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
- eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
- ;;
- esac
- }
- fi
- fi
-
- for dlprefile in $dlprefiles; do
- func_verbose "extracting global C symbols from \`$dlprefile'"
- func_basename "$dlprefile"
- name="$func_basename_result"
- $opt_dry_run || {
- eval '$ECHO ": $name " >> "$nlist"'
- eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'"
- }
- done
-
- $opt_dry_run || {
- # Make sure we have at least an empty file.
- test -f "$nlist" || : > "$nlist"
-
- if test -n "$exclude_expsyms"; then
- $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
- $MV "$nlist"T "$nlist"
- fi
-
- # Try sorting and uniquifying the output.
- if $GREP -v "^: " < "$nlist" |
- if sort -k 3 </dev/null >/dev/null 2>&1; then
- sort -k 3
- else
- sort +2
- fi |
- uniq > "$nlist"S; then
- :
- else
- $GREP -v "^: " < "$nlist" > "$nlist"S
- fi
-
- if test -f "$nlist"S; then
- eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
- else
- $ECHO '/* NONE */' >> "$output_objdir/$my_dlsyms"
- fi
-
- $ECHO >> "$output_objdir/$my_dlsyms" "\
-
-/* The mapping between symbol names and symbols. */
-typedef struct {
- const char *name;
- void *address;
-} lt_dlsymlist;
-"
- case $host in
- *cygwin* | *mingw* | *cegcc* )
- $ECHO >> "$output_objdir/$my_dlsyms" "\
-/* DATA imports from DLLs on WIN32 can't be const, because
- runtime relocations are performed -- see ld's documentation
- on pseudo-relocs. */"
- lt_dlsym_const= ;;
- *osf5*)
- echo >> "$output_objdir/$my_dlsyms" "\
-/* This system does not cope well with relocations in const data */"
- lt_dlsym_const= ;;
- *)
- lt_dlsym_const=const ;;
- esac
-
- $ECHO >> "$output_objdir/$my_dlsyms" "\
-extern $lt_dlsym_const lt_dlsymlist
-lt_${my_prefix}_LTX_preloaded_symbols[];
-$lt_dlsym_const lt_dlsymlist
-lt_${my_prefix}_LTX_preloaded_symbols[] =
-{\
- { \"$my_originator\", (void *) 0 },"
-
- case $need_lib_prefix in
- no)
- eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
- ;;
- *)
- eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
- ;;
- esac
- $ECHO >> "$output_objdir/$my_dlsyms" "\
- {0, (void *) 0}
-};
-
-/* This works around a problem in the FreeBSD linker */
-#ifdef FREEBSD_WORKAROUND
-static const void *lt_preloaded_setup() {
- return lt_${my_prefix}_LTX_preloaded_symbols;
-}
-#endif
-
-#ifdef __cplusplus
-}
-#endif\
-"
- } # !$opt_dry_run
-
- pic_flag_for_symtable=
- case "$compile_command " in
- *" -static "*) ;;
- *)
- case $host in
- # compiling the symbol table file with pic_flag works around
- # a FreeBSD bug that causes programs to crash when -lm is
- # linked before any other PIC object. But we must not use
- # pic_flag when linking with -static. The problem exists in
- # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
- *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
- pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
- *-*-hpux*)
- pic_flag_for_symtable=" $pic_flag" ;;
- *)
- if test "X$my_pic_p" != Xno; then
- pic_flag_for_symtable=" $pic_flag"
- fi
- ;;
- esac
- ;;
- esac
- symtab_cflags=
- for arg in $LTCFLAGS; do
- case $arg in
- -pie | -fpie | -fPIE) ;;
- *) symtab_cflags="$symtab_cflags $arg" ;;
- esac
- done
-
- # Now compile the dynamic symbol file.
- func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
-
- # Clean up the generated files.
- func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
-
- # Transform the symbol file into the correct name.
- symfileobj="$output_objdir/${my_outputname}S.$objext"
- case $host in
- *cygwin* | *mingw* | *cegcc* )
- if test -f "$output_objdir/$my_outputname.def"; then
- compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
- finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
- else
- compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"`
- finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"`
- fi
- ;;
- *)
- compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"`
- finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"`
- ;;
- esac
- ;;
- *)
- func_fatal_error "unknown suffix for \`$my_dlsyms'"
- ;;
- esac
- else
- # We keep going just in case the user didn't refer to
- # lt_preloaded_symbols. The linker will fail if global_symbol_pipe
- # really was required.
-
- # Nullify the symbol file.
- compile_command=`$ECHO "X$compile_command" | $Xsed -e "s% @SYMFILE@%%"`
- finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"`
- fi
-}
-
-# func_win32_libid arg
-# return the library type of file 'arg'
-#
-# Need a lot of goo to handle *both* DLLs and import libs
-# Has to be a shell function in order to 'eat' the argument
-# that is supplied when $file_magic_command is called.
-func_win32_libid ()
-{
- $opt_debug
- win32_libid_type="unknown"
- win32_fileres=`file -L $1 2>/dev/null`
- case $win32_fileres in
- *ar\ archive\ import\ library*) # definitely import
- win32_libid_type="x86 archive import"
- ;;
- *ar\ archive*) # could be an import, or static
- if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
- $EGREP 'file format pe-i386(.*architecture: i386)?' >/dev/null ; then
- win32_nmres=`eval $NM -f posix -A $1 |
- $SED -n -e '
- 1,100{
- / I /{
- s,.*,import,
- p
- q
- }
- }'`
- case $win32_nmres in
- import*) win32_libid_type="x86 archive import";;
- *) win32_libid_type="x86 archive static";;
- esac
- fi
- ;;
- *DLL*)
- win32_libid_type="x86 DLL"
- ;;
- *executable*) # but shell scripts are "executable" too...
- case $win32_fileres in
- *MS\ Windows\ PE\ Intel*)
- win32_libid_type="x86 DLL"
- ;;
- esac
- ;;
- esac
- $ECHO "$win32_libid_type"
-}
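-
-# Illustrative example (not part of the original script): when used as
-# $file_magic_command, a call such as
-#
-#   func_win32_libid /mingw/lib/libfoo.dll.a
-#
-# echoes one of "x86 archive import", "x86 archive static", "x86 DLL" or
-# "unknown", depending on the file(1)/objdump/nm probes above.  The path
-# is a made-up example.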
-
-
-
-# func_extract_an_archive dir oldlib
-func_extract_an_archive ()
-{
- $opt_debug
- f_ex_an_ar_dir="$1"; shift
- f_ex_an_ar_oldlib="$1"
- func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" 'exit $?'
- if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
- :
- else
- func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
- fi
-}
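-
-# Illustrative usage (not part of the original script): for example
-#
-#   func_extract_an_archive "$my_xdir" "/abs/path/libfoo.a"
-#
-# unpacks the archive into $my_xdir with `$AR x' and aborts if two archive
-# members share the same object name (the `sort | sort -uc' check above).
-# The archive path is a placeholder.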
-
-
-# func_extract_archives gentop oldlib ...
-func_extract_archives ()
-{
- $opt_debug
- my_gentop="$1"; shift
- my_oldlibs=${1+"$@"}
- my_oldobjs=""
- my_xlib=""
- my_xabs=""
- my_xdir=""
-
- for my_xlib in $my_oldlibs; do
- # Extract the objects.
- case $my_xlib in
- [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
- *) my_xabs=`pwd`"/$my_xlib" ;;
- esac
- func_basename "$my_xlib"
- my_xlib="$func_basename_result"
- my_xlib_u=$my_xlib
- while :; do
- case " $extracted_archives " in
- *" $my_xlib_u "*)
- func_arith $extracted_serial + 1
- extracted_serial=$func_arith_result
- my_xlib_u=lt$extracted_serial-$my_xlib ;;
- *) break ;;
- esac
- done
- extracted_archives="$extracted_archives $my_xlib_u"
- my_xdir="$my_gentop/$my_xlib_u"
-
- func_mkdir_p "$my_xdir"
-
- case $host in
- *-darwin*)
- func_verbose "Extracting $my_xabs"
- # Do not bother doing anything if just a dry run
- $opt_dry_run || {
- darwin_orig_dir=`pwd`
- cd $my_xdir || exit $?
- darwin_archive=$my_xabs
- darwin_curdir=`pwd`
- darwin_base_archive=`basename "$darwin_archive"`
- darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
- if test -n "$darwin_arches"; then
- darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
- darwin_arch=
- func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
- for darwin_arch in $darwin_arches ; do
- func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
- $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
- cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
- func_extract_an_archive "`pwd`" "${darwin_base_archive}"
- cd "$darwin_curdir"
- $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
- done # $darwin_arches
-        ## Okay, now we have a bunch of thin objects; we have to fatten them up :)
- darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
- darwin_file=
- darwin_files=
- for darwin_file in $darwin_filelist; do
- darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP`
- $LIPO -create -output "$darwin_file" $darwin_files
- done # $darwin_filelist
- $RM -rf unfat-$$
- cd "$darwin_orig_dir"
- else
- cd $darwin_orig_dir
- func_extract_an_archive "$my_xdir" "$my_xabs"
- fi # $darwin_arches
- } # !$opt_dry_run
- ;;
- *)
- func_extract_an_archive "$my_xdir" "$my_xabs"
- ;;
- esac
- my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP`
- done
-
- func_extract_archives_result="$my_oldobjs"
-}
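-
-# Illustrative usage (not part of the original script): a caller would
-# typically do something like
-#
-#   func_extract_archives "$gentop" libfoo.a libbar.a
-#   oldobjs="$oldobjs $func_extract_archives_result"
-#
-# i.e. unpack each archive into its own subdirectory of $gentop (thinning
-# fat Darwin archives first) and collect the extracted objects from
-# $func_extract_archives_result.  The archive names are placeholders.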
-
-
-
-# func_emit_wrapper_part1 [arg=no]
-#
-# Emit the first part of a libtool wrapper script on stdout.
-# For more information, see the description associated with
-# func_emit_wrapper(), below.
-func_emit_wrapper_part1 ()
-{
- func_emit_wrapper_part1_arg1=no
- if test -n "$1" ; then
- func_emit_wrapper_part1_arg1=$1
- fi
-
- $ECHO "\
-#! $SHELL
-
-# $output - temporary wrapper script for $objdir/$outputname
-# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
-#
-# The $output program cannot be directly executed until all the libtool
-# libraries that it depends on are installed.
-#
-# This wrapper script should never be moved out of the build directory.
-# If it is, it will not operate correctly.
-
-# Sed substitution that helps us do robust quoting. It backslashifies
-# metacharacters that are still active within double-quoted strings.
-Xsed='${SED} -e 1s/^X//'
-sed_quote_subst='$sed_quote_subst'
-
-# Be Bourne compatible
-if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
- emulate sh
- NULLCMD=:
-# Zsh 3.x and 4.x perform word splitting on \${1+\"\$@\"}, which
- # is contrary to our usage. Disable this feature.
- alias -g '\${1+\"\$@\"}'='\"\$@\"'
- setopt NO_GLOB_SUBST
-else
- case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
-fi
-BIN_SH=xpg4; export BIN_SH # for Tru64
-DUALCASE=1; export DUALCASE # for MKS sh
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-relink_command=\"$relink_command\"
-
-# This environment variable determines our operation mode.
-if test \"\$libtool_install_magic\" = \"$magic\"; then
- # install mode needs the following variables:
- generated_by_libtool_version='$macro_version'
- notinst_deplibs='$notinst_deplibs'
-else
- # When we are sourced in execute mode, \$file and \$ECHO are already set.
- if test \"\$libtool_execute_magic\" != \"$magic\"; then
- ECHO=\"$qecho\"
- file=\"\$0\"
- # Make sure echo works.
- if test \"X\$1\" = X--no-reexec; then
- # Discard the --no-reexec flag, and continue.
- shift
- elif test \"X\`{ \$ECHO '\t'; } 2>/dev/null\`\" = 'X\t'; then
- # Yippee, \$ECHO works!
- :
- else
- # Restart under the correct shell, and then maybe \$ECHO will work.
- exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"}
- fi
- fi\
-"
- $ECHO "\
-
- # Find the directory that this script lives in.
- thisdir=\`\$ECHO \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\`
- test \"x\$thisdir\" = \"x\$file\" && thisdir=.
-
- # Follow symbolic links until we get to the real thisdir.
- file=\`ls -ld \"\$file\" | ${SED} -n 's/.*-> //p'\`
- while test -n \"\$file\"; do
- destdir=\`\$ECHO \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\`
-
- # If there was a directory component, then change thisdir.
- if test \"x\$destdir\" != \"x\$file\"; then
- case \"\$destdir\" in
- [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
- *) thisdir=\"\$thisdir/\$destdir\" ;;
- esac
- fi
-
- file=\`\$ECHO \"X\$file\" | \$Xsed -e 's%^.*/%%'\`
- file=\`ls -ld \"\$thisdir/\$file\" | ${SED} -n 's/.*-> //p'\`
- done
-"
-}
-# end: func_emit_wrapper_part1
-
-# func_emit_wrapper_part2 [arg=no]
-#
-# Emit the second part of a libtool wrapper script on stdout.
-# For more information, see the description associated with
-# func_emit_wrapper(), below.
-func_emit_wrapper_part2 ()
-{
- func_emit_wrapper_part2_arg1=no
- if test -n "$1" ; then
- func_emit_wrapper_part2_arg1=$1
- fi
-
- $ECHO "\
-
- # Usually 'no', except on cygwin/mingw when embedded into
- # the cwrapper.
- WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_part2_arg1
- if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
- # special case for '.'
- if test \"\$thisdir\" = \".\"; then
- thisdir=\`pwd\`
- fi
- # remove .libs from thisdir
- case \"\$thisdir\" in
- *[\\\\/]$objdir ) thisdir=\`\$ECHO \"X\$thisdir\" | \$Xsed -e 's%[\\\\/][^\\\\/]*$%%'\` ;;
- $objdir ) thisdir=. ;;
- esac
- fi
-
- # Try to get the absolute directory name.
- absdir=\`cd \"\$thisdir\" && pwd\`
- test -n \"\$absdir\" && thisdir=\"\$absdir\"
-"
-
- if test "$fast_install" = yes; then
- $ECHO "\
- program=lt-'$outputname'$exeext
- progdir=\"\$thisdir/$objdir\"
-
- if test ! -f \"\$progdir/\$program\" ||
- { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
- test \"X\$file\" != \"X\$progdir/\$program\"; }; then
-
- file=\"\$\$-\$program\"
-
- if test ! -d \"\$progdir\"; then
- $MKDIR \"\$progdir\"
- else
- $RM \"\$progdir/\$file\"
- fi"
-
- $ECHO "\
-
- # relink executable if necessary
- if test -n \"\$relink_command\"; then
- if relink_command_output=\`eval \$relink_command 2>&1\`; then :
- else
- $ECHO \"\$relink_command_output\" >&2
- $RM \"\$progdir/\$file\"
- exit 1
- fi
- fi
-
- $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
- { $RM \"\$progdir/\$program\";
- $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
- $RM \"\$progdir/\$file\"
- fi"
- else
- $ECHO "\
- program='$outputname'
- progdir=\"\$thisdir/$objdir\"
-"
- fi
-
- $ECHO "\
-
- if test -f \"\$progdir/\$program\"; then"
-
- # Export our shlibpath_var if we have one.
- if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
- $ECHO "\
- # Add our own library path to $shlibpath_var
- $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
-
- # Some systems cannot cope with colon-terminated $shlibpath_var
- # The second colon is a workaround for a bug in BeOS R4 sed
- $shlibpath_var=\`\$ECHO \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\`
-
- export $shlibpath_var
-"
- fi
-
- # fixup the dll searchpath if we need to.
- if test -n "$dllsearchpath"; then
- $ECHO "\
- # Add the dll search path components to the executable PATH
- PATH=$dllsearchpath:\$PATH
-"
- fi
-
- $ECHO "\
- if test \"\$libtool_execute_magic\" != \"$magic\"; then
- # Run the actual program with our arguments.
-"
- case $host in
- # Backslashes separate directories on plain windows
-  *-*-mingw* | *-*-os2* | *-cegcc*)
- $ECHO "\
- exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
-"
- ;;
-
- *)
- $ECHO "\
- exec \"\$progdir/\$program\" \${1+\"\$@\"}
-"
- ;;
- esac
- $ECHO "\
- \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
- exit 1
- fi
- else
- # The program doesn't exist.
- \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
- \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
- $ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
- exit 1
- fi
-fi\
-"
-}
-# end: func_emit_wrapper_part2
-
-
-# func_emit_wrapper [arg=no]
-#
-# Emit a libtool wrapper script on stdout.
-# Don't directly open a file because we may want to
-# incorporate the script contents within a cygwin/mingw
-# wrapper executable. Must ONLY be called from within
-# func_mode_link because it depends on a number of variables
-# set therein.
-#
-# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
-# variable will take. If 'yes', then the emitted script
-# will assume that the directory in which it is stored is
-# the $objdir directory. This is a cygwin/mingw-specific
-# behavior.
-func_emit_wrapper ()
-{
- func_emit_wrapper_arg1=no
- if test -n "$1" ; then
- func_emit_wrapper_arg1=$1
- fi
-
- # split this up so that func_emit_cwrapperexe_src
- # can call each part independently.
- func_emit_wrapper_part1 "${func_emit_wrapper_arg1}"
- func_emit_wrapper_part2 "${func_emit_wrapper_arg1}"
-}
-
-
-# func_to_host_path arg
-#
-# Convert paths to host format when used with build tools.
-# Intended for use with "native" mingw (where libtool itself
-# is running under the msys shell), or in the following cross-
-# build environments:
-# $build $host
-# mingw (msys) mingw [e.g. native]
-# cygwin mingw
-# *nix + wine mingw
-# where wine is equipped with the `winepath' executable.
-# In the native mingw case, the (msys) shell automatically
-# converts paths for any non-msys applications it launches,
-# but that facility isn't available from inside the cwrapper.
-# Similar accommodations are necessary for $host mingw and
-# $build cygwin. Calling this function does no harm for other
-# $host/$build combinations not listed above.
-#
-# ARG is the path (on $build) that should be converted to
-# the proper representation for $host. The result is stored
-# in $func_to_host_path_result.
-func_to_host_path ()
-{
- func_to_host_path_result="$1"
- if test -n "$1" ; then
- case $host in
- *mingw* )
- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
- case $build in
- *mingw* ) # actually, msys
- # awkward: cmd appends spaces to result
- lt_sed_strip_trailing_spaces="s/[ ]*\$//"
- func_to_host_path_tmp1=`( cmd //c echo "$1" |\
- $SED -e "$lt_sed_strip_trailing_spaces" ) 2>/dev/null || echo ""`
- func_to_host_path_result=`echo "$func_to_host_path_tmp1" |\
- $SED -e "$lt_sed_naive_backslashify"`
- ;;
- *cygwin* )
- func_to_host_path_tmp1=`cygpath -w "$1"`
- func_to_host_path_result=`echo "$func_to_host_path_tmp1" |\
- $SED -e "$lt_sed_naive_backslashify"`
- ;;
- * )
- # Unfortunately, winepath does not exit with a non-zero
- # error code, so we are forced to check the contents of
- # stdout. On the other hand, if the command is not
- # found, the shell will set an exit code of 127 and print
- # *an error message* to stdout. So we must check for both
- # error code of zero AND non-empty stdout, which explains
- # the odd construction:
- func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null`
- if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then
- func_to_host_path_result=`echo "$func_to_host_path_tmp1" |\
- $SED -e "$lt_sed_naive_backslashify"`
- else
- # Allow warning below.
- func_to_host_path_result=""
- fi
- ;;
- esac
- if test -z "$func_to_host_path_result" ; then
- func_error "Could not determine host path corresponding to"
- func_error " '$1'"
- func_error "Continuing, but uninstalled executables may not work."
- # Fallback:
- func_to_host_path_result="$1"
- fi
- ;;
- esac
- fi
-}
-# end: func_to_host_path
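-
-# Illustrative example (not part of the original script): with $build msys
-# and $host mingw, a call such as
-#
-#   func_to_host_path "/c/build/libs"
-#
-# is expected to leave a backslashified Windows path (roughly
-# 'c:\\build\\libs') in $func_to_host_path_result, while on hosts that do
-# not match *mingw* the input is returned unchanged.  The path is a
-# made-up example.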
-
-# func_to_host_pathlist arg
-#
-# Convert pathlists to host format when used with build tools.
-# See func_to_host_path(), above. This function supports the
-# following $build/$host combinations (but does no harm for
-# combinations not listed here):
-# $build $host
-# mingw (msys) mingw [e.g. native]
-# cygwin mingw
-# *nix + wine mingw
-#
-# Path separators are also converted from $build format to
-# $host format. If ARG begins or ends with a path separator
-# character, it is preserved (but converted to $host format)
-# on output.
-#
-# ARG is a pathlist (on $build) that should be converted to
-# the proper representation on $host. The result is stored
-# in $func_to_host_pathlist_result.
-func_to_host_pathlist ()
-{
- func_to_host_pathlist_result="$1"
- if test -n "$1" ; then
- case $host in
- *mingw* )
- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
- # Remove leading and trailing path separator characters from
- # ARG. msys behavior is inconsistent here, cygpath turns them
- # into '.;' and ';.', and winepath ignores them completely.
- func_to_host_pathlist_tmp2="$1"
- # Once set for this call, this variable should not be
-      # reassigned. It is used in the fallback case.
- func_to_host_pathlist_tmp1=`echo "$func_to_host_pathlist_tmp2" |\
- $SED -e 's|^:*||' -e 's|:*$||'`
- case $build in
- *mingw* ) # Actually, msys.
- # Awkward: cmd appends spaces to result.
- lt_sed_strip_trailing_spaces="s/[ ]*\$//"
- func_to_host_pathlist_tmp2=`( cmd //c echo "$func_to_host_pathlist_tmp1" |\
- $SED -e "$lt_sed_strip_trailing_spaces" ) 2>/dev/null || echo ""`
- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp2" |\
- $SED -e "$lt_sed_naive_backslashify"`
- ;;
- *cygwin* )
- func_to_host_pathlist_tmp2=`cygpath -w -p "$func_to_host_pathlist_tmp1"`
- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp2" |\
- $SED -e "$lt_sed_naive_backslashify"`
- ;;
- * )
- # unfortunately, winepath doesn't convert pathlists
- func_to_host_pathlist_result=""
- func_to_host_pathlist_oldIFS=$IFS
- IFS=:
- for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do
- IFS=$func_to_host_pathlist_oldIFS
- if test -n "$func_to_host_pathlist_f" ; then
- func_to_host_path "$func_to_host_pathlist_f"
- if test -n "$func_to_host_path_result" ; then
- if test -z "$func_to_host_pathlist_result" ; then
- func_to_host_pathlist_result="$func_to_host_path_result"
- else
- func_to_host_pathlist_result="$func_to_host_pathlist_result;$func_to_host_path_result"
- fi
- fi
- fi
- IFS=:
- done
- IFS=$func_to_host_pathlist_oldIFS
- ;;
- esac
- if test -z "$func_to_host_pathlist_result" ; then
- func_error "Could not determine the host path(s) corresponding to"
- func_error " '$1'"
- func_error "Continuing, but uninstalled executables may not work."
- # Fallback. This may break if $1 contains DOS-style drive
- # specifications. The fix is not to complicate the expression
- # below, but for the user to provide a working wine installation
- # with winepath so that path translation in the cross-to-mingw
- # case works properly.
- lt_replace_pathsep_nix_to_dos="s|:|;|g"
- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\
- $SED -e "$lt_replace_pathsep_nix_to_dos"`
- fi
- # Now, add the leading and trailing path separators back
- case "$1" in
- :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result"
- ;;
- esac
- case "$1" in
- *: ) func_to_host_pathlist_result="$func_to_host_pathlist_result;"
- ;;
- esac
- ;;
- esac
- fi
-}
-# end: func_to_host_pathlist
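-
-# Illustrative example (not part of the original script): on a mingw host,
-#
-#   func_to_host_pathlist "/c/one:/c/two:"
-#
-# is expected to yield something like 'c:\\one;c:\\two;' in
-# $func_to_host_pathlist_result: `:' separators become `;', each entry is
-# converted as in func_to_host_path, and a leading or trailing separator
-# is preserved.  The paths are made-up examples.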
-
-# func_emit_cwrapperexe_src
-# emit the source code for a wrapper executable on stdout
-# Must ONLY be called from within func_mode_link because
-# it depends on a number of variables set therein.
-func_emit_cwrapperexe_src ()
-{
- cat <<EOF
-
-/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
- Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
-
- The $output program cannot be directly executed until all the libtool
- libraries that it depends on are installed.
-
- This wrapper executable should never be moved out of the build directory.
- If it is, it will not operate correctly.
-
- Currently, it simply execs the wrapper *script* "$SHELL $output",
-   but could eventually absorb all of the script's functionality and
- exec $objdir/$outputname directly.
-*/
-EOF
- cat <<"EOF"
-#include <stdio.h>
-#include <stdlib.h>
-#ifdef _MSC_VER
-# include <direct.h>
-# include <process.h>
-# include <io.h>
-# define setmode _setmode
-#else
-# include <unistd.h>
-# include <stdint.h>
-# ifdef __CYGWIN__
-# include <io.h>
-# define HAVE_SETENV
-# ifdef __STRICT_ANSI__
-char *realpath (const char *, char *);
-int putenv (char *);
-int setenv (const char *, const char *, int);
-# endif
-# endif
-#endif
-#include <malloc.h>
-#include <stdarg.h>
-#include <assert.h>
-#include <string.h>
-#include <ctype.h>
-#include <errno.h>
-#include <fcntl.h>
-#include <sys/stat.h>
-
-#if defined(PATH_MAX)
-# define LT_PATHMAX PATH_MAX
-#elif defined(MAXPATHLEN)
-# define LT_PATHMAX MAXPATHLEN
-#else
-# define LT_PATHMAX 1024
-#endif
-
-#ifndef S_IXOTH
-# define S_IXOTH 0
-#endif
-#ifndef S_IXGRP
-# define S_IXGRP 0
-#endif
-
-#ifdef _MSC_VER
-# define S_IXUSR _S_IEXEC
-# define stat _stat
-# ifndef _INTPTR_T_DEFINED
-# define intptr_t int
-# endif
-#endif
-
-#ifndef DIR_SEPARATOR
-# define DIR_SEPARATOR '/'
-# define PATH_SEPARATOR ':'
-#endif
-
-#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
- defined (__OS2__)
-# define HAVE_DOS_BASED_FILE_SYSTEM
-# define FOPEN_WB "wb"
-# ifndef DIR_SEPARATOR_2
-# define DIR_SEPARATOR_2 '\\'
-# endif
-# ifndef PATH_SEPARATOR_2
-# define PATH_SEPARATOR_2 ';'
-# endif
-#endif
-
-#ifndef DIR_SEPARATOR_2
-# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
-#else /* DIR_SEPARATOR_2 */
-# define IS_DIR_SEPARATOR(ch) \
- (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
-#endif /* DIR_SEPARATOR_2 */
-
-#ifndef PATH_SEPARATOR_2
-# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
-#else /* PATH_SEPARATOR_2 */
-# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
-#endif /* PATH_SEPARATOR_2 */
-
-#ifdef __CYGWIN__
-# define FOPEN_WB "wb"
-#endif
-
-#ifndef FOPEN_WB
-# define FOPEN_WB "w"
-#endif
-#ifndef _O_BINARY
-# define _O_BINARY 0
-#endif
-
-#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type)))
-#define XFREE(stale) do { \
- if (stale) { free ((void *) stale); stale = 0; } \
-} while (0)
-
-#undef LTWRAPPER_DEBUGPRINTF
-#if defined DEBUGWRAPPER
-# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args
-static void
-ltwrapper_debugprintf (const char *fmt, ...)
-{
- va_list args;
- va_start (args, fmt);
- (void) vfprintf (stderr, fmt, args);
- va_end (args);
-}
-#else
-# define LTWRAPPER_DEBUGPRINTF(args)
-#endif
-
-const char *program_name = NULL;
-
-void *xmalloc (size_t num);
-char *xstrdup (const char *string);
-const char *base_name (const char *name);
-char *find_executable (const char *wrapper);
-char *chase_symlinks (const char *pathspec);
-int make_executable (const char *path);
-int check_executable (const char *path);
-char *strendzap (char *str, const char *pat);
-void lt_fatal (const char *message, ...);
-void lt_setenv (const char *name, const char *value);
-char *lt_extend_str (const char *orig_value, const char *add, int to_end);
-void lt_opt_process_env_set (const char *arg);
-void lt_opt_process_env_prepend (const char *arg);
-void lt_opt_process_env_append (const char *arg);
-int lt_split_name_value (const char *arg, char** name, char** value);
-void lt_update_exe_path (const char *name, const char *value);
-void lt_update_lib_path (const char *name, const char *value);
-
-static const char *script_text_part1 =
-EOF
-
- func_emit_wrapper_part1 yes |
- $SED -e 's/\([\\"]\)/\\\1/g' \
- -e 's/^/ "/' -e 's/$/\\n"/'
- echo ";"
- cat <<EOF
-
-static const char *script_text_part2 =
-EOF
- func_emit_wrapper_part2 yes |
- $SED -e 's/\([\\"]\)/\\\1/g' \
- -e 's/^/ "/' -e 's/$/\\n"/'
- echo ";"
-
- cat <<EOF
-const char * MAGIC_EXE = "$magic_exe";
-const char * LIB_PATH_VARNAME = "$shlibpath_var";
-EOF
-
- if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
- func_to_host_pathlist "$temp_rpath"
- cat <<EOF
-const char * LIB_PATH_VALUE = "$func_to_host_pathlist_result";
-EOF
- else
- cat <<"EOF"
-const char * LIB_PATH_VALUE = "";
-EOF
- fi
-
- if test -n "$dllsearchpath"; then
- func_to_host_pathlist "$dllsearchpath:"
- cat <<EOF
-const char * EXE_PATH_VARNAME = "PATH";
-const char * EXE_PATH_VALUE = "$func_to_host_pathlist_result";
-EOF
- else
- cat <<"EOF"
-const char * EXE_PATH_VARNAME = "";
-const char * EXE_PATH_VALUE = "";
-EOF
- fi
-
- if test "$fast_install" = yes; then
- cat <<EOF
-const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
-EOF
- else
- cat <<EOF
-const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
-EOF
- fi
-
-
- cat <<"EOF"
-
-#define LTWRAPPER_OPTION_PREFIX "--lt-"
-#define LTWRAPPER_OPTION_PREFIX_LENGTH 5
-
-static const size_t opt_prefix_len = LTWRAPPER_OPTION_PREFIX_LENGTH;
-static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
-
-static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script";
-
-static const size_t env_set_opt_len = LTWRAPPER_OPTION_PREFIX_LENGTH + 7;
-static const char *env_set_opt = LTWRAPPER_OPTION_PREFIX "env-set";
- /* argument is putenv-style "foo=bar", value of foo is set to bar */
-
-static const size_t env_prepend_opt_len = LTWRAPPER_OPTION_PREFIX_LENGTH + 11;
-static const char *env_prepend_opt = LTWRAPPER_OPTION_PREFIX "env-prepend";
- /* argument is putenv-style "foo=bar", new value of foo is bar${foo} */
-
-static const size_t env_append_opt_len = LTWRAPPER_OPTION_PREFIX_LENGTH + 10;
-static const char *env_append_opt = LTWRAPPER_OPTION_PREFIX "env-append";
- /* argument is putenv-style "foo=bar", new value of foo is ${foo}bar */
-
-int
-main (int argc, char *argv[])
-{
- char **newargz;
- int newargc;
- char *tmp_pathspec;
- char *actual_cwrapper_path;
- char *actual_cwrapper_name;
- char *target_name;
- char *lt_argv_zero;
- intptr_t rval = 127;
-
- int i;
-
- program_name = (char *) xstrdup (base_name (argv[0]));
- LTWRAPPER_DEBUGPRINTF (("(main) argv[0] : %s\n", argv[0]));
- LTWRAPPER_DEBUGPRINTF (("(main) program_name : %s\n", program_name));
-
- /* very simple arg parsing; don't want to rely on getopt */
- for (i = 1; i < argc; i++)
- {
- if (strcmp (argv[i], dumpscript_opt) == 0)
- {
-EOF
- case "$host" in
- *mingw* | *cygwin* )
- # make stdout use "unix" line endings
- echo " setmode(1,_O_BINARY);"
- ;;
- esac
-
- cat <<"EOF"
- printf ("%s", script_text_part1);
- printf ("%s", script_text_part2);
- return 0;
- }
- }
-
- newargz = XMALLOC (char *, argc + 1);
- tmp_pathspec = find_executable (argv[0]);
- if (tmp_pathspec == NULL)
- lt_fatal ("Couldn't find %s", argv[0]);
- LTWRAPPER_DEBUGPRINTF (("(main) found exe (before symlink chase) at : %s\n",
- tmp_pathspec));
-
- actual_cwrapper_path = chase_symlinks (tmp_pathspec);
- LTWRAPPER_DEBUGPRINTF (("(main) found exe (after symlink chase) at : %s\n",
- actual_cwrapper_path));
- XFREE (tmp_pathspec);
-
- actual_cwrapper_name = xstrdup( base_name (actual_cwrapper_path));
- strendzap (actual_cwrapper_path, actual_cwrapper_name);
-
- /* wrapper name transforms */
- strendzap (actual_cwrapper_name, ".exe");
- tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
- XFREE (actual_cwrapper_name);
- actual_cwrapper_name = tmp_pathspec;
- tmp_pathspec = 0;
-
- /* target_name transforms -- use actual target program name; might have lt- prefix */
- target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
- strendzap (target_name, ".exe");
- tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
- XFREE (target_name);
- target_name = tmp_pathspec;
- tmp_pathspec = 0;
-
- LTWRAPPER_DEBUGPRINTF (("(main) libtool target name: %s\n",
- target_name));
-EOF
-
- cat <<EOF
- newargz[0] =
- XMALLOC (char, (strlen (actual_cwrapper_path) +
- strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
- strcpy (newargz[0], actual_cwrapper_path);
- strcat (newargz[0], "$objdir");
- strcat (newargz[0], "/");
-EOF
-
- cat <<"EOF"
- /* stop here, and copy so we don't have to do this twice */
- tmp_pathspec = xstrdup (newargz[0]);
-
- /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
- strcat (newargz[0], actual_cwrapper_name);
-
- /* DO want the lt- prefix here if it exists, so use target_name */
- lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
- XFREE (tmp_pathspec);
- tmp_pathspec = NULL;
-EOF
-
- case $host_os in
- mingw*)
- cat <<"EOF"
- {
- char* p;
- while ((p = strchr (newargz[0], '\\')) != NULL)
- {
- *p = '/';
- }
- while ((p = strchr (lt_argv_zero, '\\')) != NULL)
- {
- *p = '/';
- }
- }
-EOF
- ;;
- esac
-
- cat <<"EOF"
- XFREE (target_name);
- XFREE (actual_cwrapper_path);
- XFREE (actual_cwrapper_name);
-
- lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
-  lt_setenv ("DUALCASE", "1"); /* for MKS sh */
- lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
- lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
-
- newargc=0;
- for (i = 1; i < argc; i++)
- {
- if (strncmp (argv[i], env_set_opt, env_set_opt_len) == 0)
- {
- if (argv[i][env_set_opt_len] == '=')
- {
- const char *p = argv[i] + env_set_opt_len + 1;
- lt_opt_process_env_set (p);
- }
- else if (argv[i][env_set_opt_len] == '\0' && i + 1 < argc)
- {
- lt_opt_process_env_set (argv[++i]); /* don't copy */
- }
- else
- lt_fatal ("%s missing required argument", env_set_opt);
- continue;
- }
- if (strncmp (argv[i], env_prepend_opt, env_prepend_opt_len) == 0)
- {
- if (argv[i][env_prepend_opt_len] == '=')
- {
- const char *p = argv[i] + env_prepend_opt_len + 1;
- lt_opt_process_env_prepend (p);
- }
- else if (argv[i][env_prepend_opt_len] == '\0' && i + 1 < argc)
- {
- lt_opt_process_env_prepend (argv[++i]); /* don't copy */
- }
- else
- lt_fatal ("%s missing required argument", env_prepend_opt);
- continue;
- }
- if (strncmp (argv[i], env_append_opt, env_append_opt_len) == 0)
- {
- if (argv[i][env_append_opt_len] == '=')
- {
- const char *p = argv[i] + env_append_opt_len + 1;
- lt_opt_process_env_append (p);
- }
- else if (argv[i][env_append_opt_len] == '\0' && i + 1 < argc)
- {
- lt_opt_process_env_append (argv[++i]); /* don't copy */
- }
- else
- lt_fatal ("%s missing required argument", env_append_opt);
- continue;
- }
- if (strncmp (argv[i], ltwrapper_option_prefix, opt_prefix_len) == 0)
- {
- /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
- namespace, but it is not one of the ones we know about and
-             have already dealt with, above (including dump-script), then
- report an error. Otherwise, targets might begin to believe
- they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
- namespace. The first time any user complains about this, we'll
- need to make LTWRAPPER_OPTION_PREFIX a configure-time option
- or a configure.ac-settable value.
- */
- lt_fatal ("Unrecognized option in %s namespace: '%s'",
- ltwrapper_option_prefix, argv[i]);
- }
- /* otherwise ... */
- newargz[++newargc] = xstrdup (argv[i]);
- }
- newargz[++newargc] = NULL;
-
- LTWRAPPER_DEBUGPRINTF (("(main) lt_argv_zero : %s\n", (lt_argv_zero ? lt_argv_zero : "<NULL>")));
- for (i = 0; i < newargc; i++)
- {
- LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : "<NULL>")));
- }
-
-EOF
-
- case $host_os in
- mingw*)
- cat <<"EOF"
- /* execv doesn't actually work on mingw as expected on unix */
- rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
- if (rval == -1)
- {
- /* failed to start process */
- LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno));
- return 127;
- }
- return rval;
-EOF
- ;;
- *)
- cat <<"EOF"
- execv (lt_argv_zero, newargz);
- return rval; /* =127, but avoids unused variable warning */
-EOF
- ;;
- esac
-
- cat <<"EOF"
-}
-
-void *
-xmalloc (size_t num)
-{
- void *p = (void *) malloc (num);
- if (!p)
- lt_fatal ("Memory exhausted");
-
- return p;
-}
-
-char *
-xstrdup (const char *string)
-{
- return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
- string) : NULL;
-}
-
-const char *
-base_name (const char *name)
-{
- const char *base;
-
-#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
- /* Skip over the disk name in MSDOS pathnames. */
- if (isalpha ((unsigned char) name[0]) && name[1] == ':')
- name += 2;
-#endif
-
- for (base = name; *name; name++)
- if (IS_DIR_SEPARATOR (*name))
- base = name + 1;
- return base;
-}
-
-int
-check_executable (const char *path)
-{
- struct stat st;
-
- LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n",
- path ? (*path ? path : "EMPTY!") : "NULL!"));
- if ((!path) || (!*path))
- return 0;
-
- if ((stat (path, &st) >= 0)
- && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
- return 1;
- else
- return 0;
-}
-
-int
-make_executable (const char *path)
-{
- int rval = 0;
- struct stat st;
-
- LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n",
- path ? (*path ? path : "EMPTY!") : "NULL!"));
- if ((!path) || (!*path))
- return 0;
-
- if (stat (path, &st) >= 0)
- {
- rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
- }
- return rval;
-}
-
-/* Searches for the full path of the wrapper. Returns
-   newly allocated full path name if found, NULL otherwise.
- Does not chase symlinks, even on platforms that support them.
-*/
-char *
-find_executable (const char *wrapper)
-{
- int has_slash = 0;
- const char *p;
- const char *p_next;
- /* static buffer for getcwd */
- char tmp[LT_PATHMAX + 1];
- int tmp_len;
- char *concat_name;
-
- LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n",
- wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!"));
-
- if ((wrapper == NULL) || (*wrapper == '\0'))
- return NULL;
-
- /* Absolute path? */
-#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
- if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
- {
- concat_name = xstrdup (wrapper);
- if (check_executable (concat_name))
- return concat_name;
- XFREE (concat_name);
- }
- else
- {
-#endif
- if (IS_DIR_SEPARATOR (wrapper[0]))
- {
- concat_name = xstrdup (wrapper);
- if (check_executable (concat_name))
- return concat_name;
- XFREE (concat_name);
- }
-#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
- }
-#endif
-
- for (p = wrapper; *p; p++)
- if (*p == '/')
- {
- has_slash = 1;
- break;
- }
- if (!has_slash)
- {
- /* no slashes; search PATH */
- const char *path = getenv ("PATH");
- if (path != NULL)
- {
- for (p = path; *p; p = p_next)
- {
- const char *q;
- size_t p_len;
- for (q = p; *q; q++)
- if (IS_PATH_SEPARATOR (*q))
- break;
- p_len = q - p;
- p_next = (*q == '\0' ? q : q + 1);
- if (p_len == 0)
- {
- /* empty path: current directory */
- if (getcwd (tmp, LT_PATHMAX) == NULL)
- lt_fatal ("getcwd failed");
- tmp_len = strlen (tmp);
- concat_name =
- XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
- memcpy (concat_name, tmp, tmp_len);
- concat_name[tmp_len] = '/';
- strcpy (concat_name + tmp_len + 1, wrapper);
- }
- else
- {
- concat_name =
- XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
- memcpy (concat_name, p, p_len);
- concat_name[p_len] = '/';
- strcpy (concat_name + p_len + 1, wrapper);
- }
- if (check_executable (concat_name))
- return concat_name;
- XFREE (concat_name);
- }
- }
- /* not found in PATH; assume curdir */
- }
- /* Relative path | not found in path: prepend cwd */
- if (getcwd (tmp, LT_PATHMAX) == NULL)
- lt_fatal ("getcwd failed");
- tmp_len = strlen (tmp);
- concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
- memcpy (concat_name, tmp, tmp_len);
- concat_name[tmp_len] = '/';
- strcpy (concat_name + tmp_len + 1, wrapper);
-
- if (check_executable (concat_name))
- return concat_name;
- XFREE (concat_name);
- return NULL;
-}
-
-char *
-chase_symlinks (const char *pathspec)
-{
-#ifndef S_ISLNK
- return xstrdup (pathspec);
-#else
- char buf[LT_PATHMAX];
- struct stat s;
- char *tmp_pathspec = xstrdup (pathspec);
- char *p;
- int has_symlinks = 0;
- while (strlen (tmp_pathspec) && !has_symlinks)
- {
- LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n",
- tmp_pathspec));
- if (lstat (tmp_pathspec, &s) == 0)
- {
- if (S_ISLNK (s.st_mode) != 0)
- {
- has_symlinks = 1;
- break;
- }
-
- /* search backwards for last DIR_SEPARATOR */
- p = tmp_pathspec + strlen (tmp_pathspec) - 1;
- while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
- p--;
- if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
- {
- /* no more DIR_SEPARATORS left */
- break;
- }
- *p = '\0';
- }
- else
- {
- char *errstr = strerror (errno);
- lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr);
- }
- }
- XFREE (tmp_pathspec);
-
- if (!has_symlinks)
- {
- return xstrdup (pathspec);
- }
-
- tmp_pathspec = realpath (pathspec, buf);
- if (tmp_pathspec == 0)
- {
- lt_fatal ("Could not follow symlinks for %s", pathspec);
- }
- return xstrdup (tmp_pathspec);
-#endif
-}
-
-char *
-strendzap (char *str, const char *pat)
-{
- size_t len, patlen;
-
- assert (str != NULL);
- assert (pat != NULL);
-
- len = strlen (str);
- patlen = strlen (pat);
-
- if (patlen <= len)
- {
- str += len - patlen;
- if (strcmp (str, pat) == 0)
- *str = '\0';
- }
- return str;
-}
-
-static void
-lt_error_core (int exit_status, const char *mode,
- const char *message, va_list ap)
-{
- fprintf (stderr, "%s: %s: ", program_name, mode);
- vfprintf (stderr, message, ap);
- fprintf (stderr, ".\n");
-
- if (exit_status >= 0)
- exit (exit_status);
-}
-
-void
-lt_fatal (const char *message, ...)
-{
- va_list ap;
- va_start (ap, message);
- lt_error_core (EXIT_FAILURE, "FATAL", message, ap);
- va_end (ap);
-}
-
-void
-lt_setenv (const char *name, const char *value)
-{
- LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n",
- (name ? name : "<NULL>"),
- (value ? value : "<NULL>")));
- {
-#ifdef HAVE_SETENV
- /* always make a copy, for consistency with !HAVE_SETENV */
- char *str = xstrdup (value);
- setenv (name, str, 1);
-#else
- int len = strlen (name) + 1 + strlen (value) + 1;
- char *str = XMALLOC (char, len);
- sprintf (str, "%s=%s", name, value);
- if (putenv (str) != EXIT_SUCCESS)
- {
- XFREE (str);
- }
-#endif
- }
-}
-
-char *
-lt_extend_str (const char *orig_value, const char *add, int to_end)
-{
- char *new_value;
- if (orig_value && *orig_value)
- {
- int orig_value_len = strlen (orig_value);
- int add_len = strlen (add);
- new_value = XMALLOC (char, add_len + orig_value_len + 1);
- if (to_end)
- {
- strcpy (new_value, orig_value);
- strcpy (new_value + orig_value_len, add);
- }
- else
- {
- strcpy (new_value, add);
- strcpy (new_value + add_len, orig_value);
- }
- }
- else
- {
- new_value = xstrdup (add);
- }
- return new_value;
-}
-
-int
-lt_split_name_value (const char *arg, char** name, char** value)
-{
- const char *p;
- int len;
- if (!arg || !*arg)
- return 1;
-
- p = strchr (arg, (int)'=');
-
- if (!p)
- return 1;
-
- *value = xstrdup (++p);
-
- len = strlen (arg) - strlen (*value);
- *name = XMALLOC (char, len);
- strncpy (*name, arg, len-1);
- (*name)[len - 1] = '\0';
-
- return 0;
-}
-
-void
-lt_opt_process_env_set (const char *arg)
-{
- char *name = NULL;
- char *value = NULL;
-
- if (lt_split_name_value (arg, &name, &value) != 0)
- {
- XFREE (name);
- XFREE (value);
- lt_fatal ("bad argument for %s: '%s'", env_set_opt, arg);
- }
-
- lt_setenv (name, value);
- XFREE (name);
- XFREE (value);
-}
-
-void
-lt_opt_process_env_prepend (const char *arg)
-{
- char *name = NULL;
- char *value = NULL;
- char *new_value = NULL;
-
- if (lt_split_name_value (arg, &name, &value) != 0)
- {
- XFREE (name);
- XFREE (value);
- lt_fatal ("bad argument for %s: '%s'", env_prepend_opt, arg);
- }
-
- new_value = lt_extend_str (getenv (name), value, 0);
- lt_setenv (name, new_value);
- XFREE (new_value);
- XFREE (name);
- XFREE (value);
-}
-
-void
-lt_opt_process_env_append (const char *arg)
-{
- char *name = NULL;
- char *value = NULL;
- char *new_value = NULL;
-
- if (lt_split_name_value (arg, &name, &value) != 0)
- {
- XFREE (name);
- XFREE (value);
- lt_fatal ("bad argument for %s: '%s'", env_append_opt, arg);
- }
-
- new_value = lt_extend_str (getenv (name), value, 1);
- lt_setenv (name, new_value);
- XFREE (new_value);
- XFREE (name);
- XFREE (value);
-}
-
-void
-lt_update_exe_path (const char *name, const char *value)
-{
- LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
- (name ? name : "<NULL>"),
- (value ? value : "<NULL>")));
-
- if (name && *name && value && *value)
- {
- char *new_value = lt_extend_str (getenv (name), value, 0);
- /* some systems can't cope with a ':'-terminated path #' */
- int len = strlen (new_value);
- while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
- {
- new_value[len-1] = '\0';
- }
- lt_setenv (name, new_value);
- XFREE (new_value);
- }
-}
-
-void
-lt_update_lib_path (const char *name, const char *value)
-{
- LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
- (name ? name : "<NULL>"),
- (value ? value : "<NULL>")));
-
- if (name && *name && value && *value)
- {
- char *new_value = lt_extend_str (getenv (name), value, 0);
- lt_setenv (name, new_value);
- XFREE (new_value);
- }
-}
-
-
-EOF
-}
-# end: func_emit_cwrapperexe_src
-
-# func_mode_link arg...
-func_mode_link ()
-{
- $opt_debug
- case $host in
- *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
- # It is impossible to link a dll without this setting, and
- # we shouldn't force the makefile maintainer to figure out
- # which system we are compiling for in order to pass an extra
- # flag for every libtool invocation.
- # allow_undefined=no
-
- # FIXME: Unfortunately, there are problems with the above when trying
- # to make a dll which has undefined symbols, in which case not
- # even a static library is built. For now, we need to specify
- # -no-undefined on the libtool link line when we can be certain
- # that all symbols are satisfied, otherwise we get a static library.
- allow_undefined=yes
- ;;
- *)
- allow_undefined=yes
- ;;
- esac
- libtool_args=$nonopt
- base_compile="$nonopt $@"
- compile_command=$nonopt
- finalize_command=$nonopt
-
- compile_rpath=
- finalize_rpath=
- compile_shlibpath=
- finalize_shlibpath=
- convenience=
- old_convenience=
- deplibs=
- old_deplibs=
- compiler_flags=
- linker_flags=
- dllsearchpath=
- lib_search_path=`pwd`
- inst_prefix_dir=
- new_inherited_linker_flags=
-
- avoid_version=no
- dlfiles=
- dlprefiles=
- dlself=no
- export_dynamic=no
- export_symbols=
- export_symbols_regex=
- generated=
- libobjs=
- ltlibs=
- module=no
- no_install=no
- objs=
- non_pic_objects=
- precious_files_regex=
- prefer_static_libs=no
- preload=no
- prev=
- prevarg=
- release=
- rpath=
- xrpath=
- perm_rpath=
- temp_rpath=
- thread_safe=no
- vinfo=
- vinfo_number=no
- weak_libs=
- single_module="${wl}-single_module"
- func_infer_tag $base_compile
-
- # We need to know -static, to get the right output filenames.
- for arg
- do
- case $arg in
- -shared)
- test "$build_libtool_libs" != yes && \
- func_fatal_configuration "can not build a shared library"
- build_old_libs=no
- break
- ;;
- -all-static | -static | -static-libtool-libs)
- case $arg in
- -all-static)
- if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
- func_warning "complete static linking is impossible in this configuration"
- fi
- if test -n "$link_static_flag"; then
- dlopen_self=$dlopen_self_static
- fi
- prefer_static_libs=yes
- ;;
- -static)
- if test -z "$pic_flag" && test -n "$link_static_flag"; then
- dlopen_self=$dlopen_self_static
- fi
- prefer_static_libs=built
- ;;
- -static-libtool-libs)
- if test -z "$pic_flag" && test -n "$link_static_flag"; then
- dlopen_self=$dlopen_self_static
- fi
- prefer_static_libs=yes
- ;;
- esac
- build_libtool_libs=no
- build_old_libs=yes
- break
- ;;
- esac
- done
-
- # See if our shared archives depend on static archives.
- test -n "$old_archive_from_new_cmds" && build_old_libs=yes
-
- # Go through the arguments, transforming them on the way.
- while test "$#" -gt 0; do
- arg="$1"
- shift
- func_quote_for_eval "$arg"
- qarg=$func_quote_for_eval_unquoted_result
- func_append libtool_args " $func_quote_for_eval_result"
-
- # If the previous option needs an argument, assign it.
- if test -n "$prev"; then
- case $prev in
- output)
- func_append compile_command " @OUTPUT@"
- func_append finalize_command " @OUTPUT@"
- ;;
- esac
-
- case $prev in
- dlfiles|dlprefiles)
- if test "$preload" = no; then
- # Add the symbol object into the linking commands.
- func_append compile_command " @SYMFILE@"
- func_append finalize_command " @SYMFILE@"
- preload=yes
- fi
- case $arg in
- *.la | *.lo) ;; # We handle these cases below.
- force)
- if test "$dlself" = no; then
- dlself=needless
- export_dynamic=yes
- fi
- prev=
- continue
- ;;
- self)
- if test "$prev" = dlprefiles; then
- dlself=yes
- elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
- dlself=yes
- else
- dlself=needless
- export_dynamic=yes
- fi
- prev=
- continue
- ;;
- *)
- if test "$prev" = dlfiles; then
- dlfiles="$dlfiles $arg"
- else
- dlprefiles="$dlprefiles $arg"
- fi
- prev=
- continue
- ;;
- esac
- ;;
- expsyms)
- export_symbols="$arg"
- test -f "$arg" \
- || func_fatal_error "symbol file \`$arg' does not exist"
- prev=
- continue
- ;;
- expsyms_regex)
- export_symbols_regex="$arg"
- prev=
- continue
- ;;
- framework)
- case $host in
- *-*-darwin*)
- case "$deplibs " in
- *" $qarg.ltframework "*) ;;
- *) deplibs="$deplibs $qarg.ltframework" # this is fixed later
- ;;
- esac
- ;;
- esac
- prev=
- continue
- ;;
- inst_prefix)
- inst_prefix_dir="$arg"
- prev=
- continue
- ;;
- objectlist)
- if test -f "$arg"; then
- save_arg=$arg
- moreargs=
- for fil in `cat "$save_arg"`
- do
-# moreargs="$moreargs $fil"
- arg=$fil
- # A libtool-controlled object.
-
- # Check to see that this really is a libtool object.
- if func_lalib_unsafe_p "$arg"; then
- pic_object=
- non_pic_object=
-
- # Read the .lo file
- func_source "$arg"
-
- if test -z "$pic_object" ||
- test -z "$non_pic_object" ||
- test "$pic_object" = none &&
- test "$non_pic_object" = none; then
- func_fatal_error "cannot find name of object for \`$arg'"
- fi
-
- # Extract subdirectory from the argument.
- func_dirname "$arg" "/" ""
- xdir="$func_dirname_result"
-
- if test "$pic_object" != none; then
- # Prepend the subdirectory the object is found in.
- pic_object="$xdir$pic_object"
-
- if test "$prev" = dlfiles; then
- if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
- dlfiles="$dlfiles $pic_object"
- prev=
- continue
- else
- # If libtool objects are unsupported, then we need to preload.
- prev=dlprefiles
- fi
- fi
-
- # CHECK ME: I think I busted this. -Ossama
- if test "$prev" = dlprefiles; then
- # Preload the old-style object.
- dlprefiles="$dlprefiles $pic_object"
- prev=
- fi
-
- # A PIC object.
- func_append libobjs " $pic_object"
- arg="$pic_object"
- fi
-
- # Non-PIC object.
- if test "$non_pic_object" != none; then
- # Prepend the subdirectory the object is found in.
- non_pic_object="$xdir$non_pic_object"
-
- # A standard non-PIC object
- func_append non_pic_objects " $non_pic_object"
- if test -z "$pic_object" || test "$pic_object" = none ; then
- arg="$non_pic_object"
- fi
- else
- # If the PIC object exists, use it instead.
- # $xdir was prepended to $pic_object above.
- non_pic_object="$pic_object"
- func_append non_pic_objects " $non_pic_object"
- fi
- else
- # Only an error if not doing a dry-run.
- if $opt_dry_run; then
- # Extract subdirectory from the argument.
- func_dirname "$arg" "/" ""
- xdir="$func_dirname_result"
-
- func_lo2o "$arg"
- pic_object=$xdir$objdir/$func_lo2o_result
- non_pic_object=$xdir$func_lo2o_result
- func_append libobjs " $pic_object"
- func_append non_pic_objects " $non_pic_object"
- else
- func_fatal_error "\`$arg' is not a valid libtool object"
- fi
- fi
- done
- else
- func_fatal_error "link input file \`$arg' does not exist"
- fi
- arg=$save_arg
- prev=
- continue
- ;;
- precious_regex)
- precious_files_regex="$arg"
- prev=
- continue
- ;;
- release)
- release="-$arg"
- prev=
- continue
- ;;
- rpath | xrpath)
- # We need an absolute path.
- case $arg in
- [\\/]* | [A-Za-z]:[\\/]*) ;;
- *)
- func_fatal_error "only absolute run-paths are allowed"
- ;;
- esac
- if test "$prev" = rpath; then
- case "$rpath " in
- *" $arg "*) ;;
- *) rpath="$rpath $arg" ;;
- esac
- else
- case "$xrpath " in
- *" $arg "*) ;;
- *) xrpath="$xrpath $arg" ;;
- esac
- fi
- prev=
- continue
- ;;
- shrext)
- shrext_cmds="$arg"
- prev=
- continue
- ;;
- weak)
- weak_libs="$weak_libs $arg"
- prev=
- continue
- ;;
- xcclinker)
- linker_flags="$linker_flags $qarg"
- compiler_flags="$compiler_flags $qarg"
- prev=
- func_append compile_command " $qarg"
- func_append finalize_command " $qarg"
- continue
- ;;
- xcompiler)
- compiler_flags="$compiler_flags $qarg"
- prev=
- func_append compile_command " $qarg"
- func_append finalize_command " $qarg"
- continue
- ;;
- xlinker)
- linker_flags="$linker_flags $qarg"
- compiler_flags="$compiler_flags $wl$qarg"
- prev=
- func_append compile_command " $wl$qarg"
- func_append finalize_command " $wl$qarg"
- continue
- ;;
- *)
- eval "$prev=\"\$arg\""
- prev=
- continue
- ;;
- esac
- fi # test -n "$prev"
-
- prevarg="$arg"
-
- case $arg in
- -all-static)
- if test -n "$link_static_flag"; then
- # See comment for -static flag below, for more details.
- func_append compile_command " $link_static_flag"
- func_append finalize_command " $link_static_flag"
- fi
- continue
- ;;
-
- -allow-undefined)
- # FIXME: remove this flag sometime in the future.
- func_fatal_error "\`-allow-undefined' must not be used because it is the default"
- ;;
-
- -avoid-version)
- avoid_version=yes
- continue
- ;;
-
- -dlopen)
- prev=dlfiles
- continue
- ;;
-
- -dlpreopen)
- prev=dlprefiles
- continue
- ;;
-
- -export-dynamic)
- export_dynamic=yes
- continue
- ;;
-
- -export-symbols | -export-symbols-regex)
- if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
- func_fatal_error "more than one -exported-symbols argument is not allowed"
- fi
- if test "X$arg" = "X-export-symbols"; then
- prev=expsyms
- else
- prev=expsyms_regex
- fi
- continue
- ;;
-
- -framework)
- prev=framework
- continue
- ;;
-
- -inst-prefix-dir)
- prev=inst_prefix
- continue
- ;;
-
- # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
- # so, if we see these flags be careful not to treat them like -L
- -L[A-Z][A-Z]*:*)
- case $with_gcc/$host in
- no/*-*-irix* | /*-*-irix*)
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- ;;
- esac
- continue
- ;;
-
- -L*)
- func_stripname '-L' '' "$arg"
- dir=$func_stripname_result
- if test -z "$dir"; then
- if test "$#" -gt 0; then
- func_fatal_error "require no space between \`-L' and \`$1'"
- else
- func_fatal_error "need path for \`-L' option"
- fi
- fi
- # We need an absolute path.
- case $dir in
- [\\/]* | [A-Za-z]:[\\/]*) ;;
- *)
- absdir=`cd "$dir" && pwd`
- test -z "$absdir" && \
- func_fatal_error "cannot determine absolute directory name of \`$dir'"
- dir="$absdir"
- ;;
- esac
- case "$deplibs " in
- *" -L$dir "*) ;;
- *)
- deplibs="$deplibs -L$dir"
- lib_search_path="$lib_search_path $dir"
- ;;
- esac
- case $host in
- *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
- testbindir=`$ECHO "X$dir" | $Xsed -e 's*/lib$*/bin*'`
- case :$dllsearchpath: in
- *":$dir:"*) ;;
- ::) dllsearchpath=$dir;;
- *) dllsearchpath="$dllsearchpath:$dir";;
- esac
- case :$dllsearchpath: in
- *":$testbindir:"*) ;;
- ::) dllsearchpath=$testbindir;;
- *) dllsearchpath="$dllsearchpath:$testbindir";;
- esac
- ;;
- esac
- continue
- ;;
-
- -l*)
- if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
- case $host in
- *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc*)
- # These systems don't actually have a C or math library (as such)
- continue
- ;;
- *-*-os2*)
- # These systems don't actually have a C library (as such)
- test "X$arg" = "X-lc" && continue
- ;;
- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
- # Do not include libc due to us having libc/libc_r.
- test "X$arg" = "X-lc" && continue
- ;;
- *-*-rhapsody* | *-*-darwin1.[012])
- # Rhapsody C and math libraries are in the System framework
- deplibs="$deplibs System.ltframework"
- continue
- ;;
- *-*-sco3.2v5* | *-*-sco5v6*)
- # Causes problems with __ctype
- test "X$arg" = "X-lc" && continue
- ;;
- *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
- # Compiler inserts libc in the correct place for threads to work
- test "X$arg" = "X-lc" && continue
- ;;
- esac
- elif test "X$arg" = "X-lc_r"; then
- case $host in
- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
- # Do not include libc_r directly, use -pthread flag.
- continue
- ;;
- esac
- fi
- deplibs="$deplibs $arg"
- continue
- ;;
-
- -module)
- module=yes
- continue
- ;;
-
- # Tru64 UNIX uses -model [arg] to determine the layout of C++
- # classes, name mangling, and exception handling.
- # Darwin uses the -arch flag to determine output architecture.
- -model|-arch|-isysroot)
- compiler_flags="$compiler_flags $arg"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- prev=xcompiler
- continue
- ;;
-
- -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
- compiler_flags="$compiler_flags $arg"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- case "$new_inherited_linker_flags " in
- *" $arg "*) ;;
- * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;;
- esac
- continue
- ;;
-
- -multi_module)
- single_module="${wl}-multi_module"
- continue
- ;;
-
- -no-fast-install)
- fast_install=no
- continue
- ;;
-
- -no-install)
- case $host in
- *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
- # The PATH hackery in wrapper scripts is required on Windows
- # and Darwin in order for the loader to find any dlls it needs.
- func_warning "\`-no-install' is ignored for $host"
- func_warning "assuming \`-no-fast-install' instead"
- fast_install=no
- ;;
- *) no_install=yes ;;
- esac
- continue
- ;;
-
- -no-undefined)
- allow_undefined=no
- continue
- ;;
-
- -objectlist)
- prev=objectlist
- continue
- ;;
-
- -o) prev=output ;;
-
- -precious-files-regex)
- prev=precious_regex
- continue
- ;;
-
- -release)
- prev=release
- continue
- ;;
-
- -rpath)
- prev=rpath
- continue
- ;;
-
- -R)
- prev=xrpath
- continue
- ;;
-
- -R*)
- func_stripname '-R' '' "$arg"
- dir=$func_stripname_result
- # We need an absolute path.
- case $dir in
- [\\/]* | [A-Za-z]:[\\/]*) ;;
- *)
- func_fatal_error "only absolute run-paths are allowed"
- ;;
- esac
- case "$xrpath " in
- *" $dir "*) ;;
- *) xrpath="$xrpath $dir" ;;
- esac
- continue
- ;;
-
- -shared)
- # The effects of -shared are defined in a previous loop.
- continue
- ;;
-
- -shrext)
- prev=shrext
- continue
- ;;
-
- -static | -static-libtool-libs)
- # The effects of -static are defined in a previous loop.
- # We used to do the same as -all-static on platforms that
- # didn't have a PIC flag, but the assumption that the effects
- # would be equivalent was wrong. It would break on at least
- # Digital Unix and AIX.
- continue
- ;;
-
- -thread-safe)
- thread_safe=yes
- continue
- ;;
-
- -version-info)
- prev=vinfo
- continue
- ;;
-
- -version-number)
- prev=vinfo
- vinfo_number=yes
- continue
- ;;
-
- -weak)
- prev=weak
- continue
- ;;
-
- -Wc,*)
- func_stripname '-Wc,' '' "$arg"
- args=$func_stripname_result
- arg=
- save_ifs="$IFS"; IFS=','
- for flag in $args; do
- IFS="$save_ifs"
- func_quote_for_eval "$flag"
- arg="$arg $wl$func_quote_for_eval_result"
- compiler_flags="$compiler_flags $func_quote_for_eval_result"
- done
- IFS="$save_ifs"
- func_stripname ' ' '' "$arg"
- arg=$func_stripname_result
- ;;
-
- -Wl,*)
- func_stripname '-Wl,' '' "$arg"
- args=$func_stripname_result
- arg=
- save_ifs="$IFS"; IFS=','
- for flag in $args; do
- IFS="$save_ifs"
- func_quote_for_eval "$flag"
- arg="$arg $wl$func_quote_for_eval_result"
- compiler_flags="$compiler_flags $wl$func_quote_for_eval_result"
- linker_flags="$linker_flags $func_quote_for_eval_result"
- done
- IFS="$save_ifs"
- func_stripname ' ' '' "$arg"
- arg=$func_stripname_result
- ;;
-
- -Xcompiler)
- prev=xcompiler
- continue
- ;;
-
- -Xlinker)
- prev=xlinker
- continue
- ;;
-
- -XCClinker)
- prev=xcclinker
- continue
- ;;
-
- # -msg_* for osf cc
- -msg_*)
- func_quote_for_eval "$arg"
- arg="$func_quote_for_eval_result"
- ;;
-
- # -64, -mips[0-9] enable 64-bit mode on the SGI compiler
- # -r[0-9][0-9]* specifies the processor on the SGI compiler
- # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler
- # +DA*, +DD* enable 64-bit mode on the HP compiler
- # -q* pass through compiler args for the IBM compiler
- # -m*, -t[45]*, -txscale* pass through architecture-specific
- # compiler args for GCC
- # -F/path gives path to uninstalled frameworks, gcc on darwin
- # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC
- # @file GCC response files
- -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
- -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*)
- func_quote_for_eval "$arg"
- arg="$func_quote_for_eval_result"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- compiler_flags="$compiler_flags $arg"
- continue
- ;;
-
- # Some other compiler flag.
- -* | +*)
- func_quote_for_eval "$arg"
- arg="$func_quote_for_eval_result"
- ;;
-
- *.$objext)
- # A standard object.
- objs="$objs $arg"
- ;;
-
- *.lo)
- # A libtool-controlled object.
-
- # Check to see that this really is a libtool object.
- if func_lalib_unsafe_p "$arg"; then
- pic_object=
- non_pic_object=
-
- # Read the .lo file
- func_source "$arg"
-
- if test -z "$pic_object" ||
- test -z "$non_pic_object" ||
- test "$pic_object" = none &&
- test "$non_pic_object" = none; then
- func_fatal_error "cannot find name of object for \`$arg'"
- fi
-
- # Extract subdirectory from the argument.
- func_dirname "$arg" "/" ""
- xdir="$func_dirname_result"
-
- if test "$pic_object" != none; then
- # Prepend the subdirectory the object is found in.
- pic_object="$xdir$pic_object"
-
- if test "$prev" = dlfiles; then
- if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
- dlfiles="$dlfiles $pic_object"
- prev=
- continue
- else
- # If libtool objects are unsupported, then we need to preload.
- prev=dlprefiles
- fi
- fi
-
- # CHECK ME: I think I busted this. -Ossama
- if test "$prev" = dlprefiles; then
- # Preload the old-style object.
- dlprefiles="$dlprefiles $pic_object"
- prev=
- fi
-
- # A PIC object.
- func_append libobjs " $pic_object"
- arg="$pic_object"
- fi
-
- # Non-PIC object.
- if test "$non_pic_object" != none; then
- # Prepend the subdirectory the object is found in.
- non_pic_object="$xdir$non_pic_object"
-
- # A standard non-PIC object
- func_append non_pic_objects " $non_pic_object"
- if test -z "$pic_object" || test "$pic_object" = none ; then
- arg="$non_pic_object"
- fi
- else
- # If the PIC object exists, use it instead.
- # $xdir was prepended to $pic_object above.
- non_pic_object="$pic_object"
- func_append non_pic_objects " $non_pic_object"
- fi
- else
- # Only an error if not doing a dry-run.
- if $opt_dry_run; then
- # Extract subdirectory from the argument.
- func_dirname "$arg" "/" ""
- xdir="$func_dirname_result"
-
- func_lo2o "$arg"
- pic_object=$xdir$objdir/$func_lo2o_result
- non_pic_object=$xdir$func_lo2o_result
- func_append libobjs " $pic_object"
- func_append non_pic_objects " $non_pic_object"
- else
- func_fatal_error "\`$arg' is not a valid libtool object"
- fi
- fi
- ;;
-
- *.$libext)
- # An archive.
- deplibs="$deplibs $arg"
- old_deplibs="$old_deplibs $arg"
- continue
- ;;
-
- *.la)
- # A libtool-controlled library.
-
- if test "$prev" = dlfiles; then
- # This library was specified with -dlopen.
- dlfiles="$dlfiles $arg"
- prev=
- elif test "$prev" = dlprefiles; then
- # The library was specified with -dlpreopen.
- dlprefiles="$dlprefiles $arg"
- prev=
- else
- deplibs="$deplibs $arg"
- fi
- continue
- ;;
-
- # Some other compiler argument.
- *)
- # Unknown arguments in both finalize_command and compile_command need
- # to be aesthetically quoted because they are evaled later.
- func_quote_for_eval "$arg"
- arg="$func_quote_for_eval_result"
- ;;
- esac # arg
-
- # Now actually substitute the argument into the commands.
- if test -n "$arg"; then
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- fi
- done # argument parsing loop
-
- test -n "$prev" && \
- func_fatal_help "the \`$prevarg' option requires an argument"
-
- if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
- eval arg=\"$export_dynamic_flag_spec\"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- fi
-
- oldlibs=
- # calculate the name of the file, without its directory
- func_basename "$output"
- outputname="$func_basename_result"
- libobjs_save="$libobjs"
-
- if test -n "$shlibpath_var"; then
- # get the directories listed in $shlibpath_var
- eval shlib_search_path=\`\$ECHO \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\`
- else
- shlib_search_path=
- fi
- eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
- eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
-
- func_dirname "$output" "/" ""
- output_objdir="$func_dirname_result$objdir"
- # Create the object directory.
- func_mkdir_p "$output_objdir"
-
- # Determine the type of output
- case $output in
- "")
- func_fatal_help "you must specify an output file"
- ;;
- *.$libext) linkmode=oldlib ;;
- *.lo | *.$objext) linkmode=obj ;;
- *.la) linkmode=lib ;;
- *) linkmode=prog ;; # Anything else should be a program.
- esac
-
- specialdeplibs=
-
- libs=
- # Find all interdependent deplibs by searching for libraries
- # that are linked more than once (e.g. -la -lb -la)
- for deplib in $deplibs; do
- if $opt_duplicate_deps ; then
- case "$libs " in
- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
- esac
- fi
- libs="$libs $deplib"
- done
-
- if test "$linkmode" = lib; then
- libs="$predeps $libs $compiler_lib_search_path $postdeps"
-
- # Compute libraries that are listed more than once in $predeps
- # $postdeps and mark them as special (i.e., whose duplicates are
- # not to be eliminated).
- pre_post_deps=
- if $opt_duplicate_compiler_generated_deps; then
- for pre_post_dep in $predeps $postdeps; do
- case "$pre_post_deps " in
- *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;;
- esac
- pre_post_deps="$pre_post_deps $pre_post_dep"
- done
- fi
- pre_post_deps=
- fi
-
- deplibs=
- newdependency_libs=
- newlib_search_path=
- need_relink=no # whether we're linking any uninstalled libtool libraries
- notinst_deplibs= # not-installed libtool libraries
- notinst_path= # paths that contain not-installed libtool libraries
-
- case $linkmode in
- lib)
- passes="conv dlpreopen link"
- for file in $dlfiles $dlprefiles; do
- case $file in
- *.la) ;;
- *)
- func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
- ;;
- esac
- done
- ;;
- prog)
- compile_deplibs=
- finalize_deplibs=
- alldeplibs=no
- newdlfiles=
- newdlprefiles=
- passes="conv scan dlopen dlpreopen link"
- ;;
- *) passes="conv"
- ;;
- esac
-
- for pass in $passes; do
- # The preopen pass in lib mode reverses $deplibs; put it back here
- # so that -L comes before libs that need it for instance...
- if test "$linkmode,$pass" = "lib,link"; then
- ## FIXME: Find the place where the list is rebuilt in the wrong
- ## order, and fix it there properly
- tmp_deplibs=
- for deplib in $deplibs; do
- tmp_deplibs="$deplib $tmp_deplibs"
- done
- deplibs="$tmp_deplibs"
- fi
-
- if test "$linkmode,$pass" = "lib,link" ||
- test "$linkmode,$pass" = "prog,scan"; then
- libs="$deplibs"
- deplibs=
- fi
- if test "$linkmode" = prog; then
- case $pass in
- dlopen) libs="$dlfiles" ;;
- dlpreopen) libs="$dlprefiles" ;;
- link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
- esac
- fi
- if test "$linkmode,$pass" = "lib,dlpreopen"; then
- # Collect and forward deplibs of preopened libtool libs
- for lib in $dlprefiles; do
- # Ignore non-libtool-libs
- dependency_libs=
- case $lib in
- *.la) func_source "$lib" ;;
- esac
-
- # Collect preopened libtool deplibs, except any this library
- # has declared as weak libs
- for deplib in $dependency_libs; do
- deplib_base=`$ECHO "X$deplib" | $Xsed -e "$basename"`
- case " $weak_libs " in
- *" $deplib_base "*) ;;
- *) deplibs="$deplibs $deplib" ;;
- esac
- done
- done
- libs="$dlprefiles"
- fi
- if test "$pass" = dlopen; then
- # Collect dlpreopened libraries
- save_deplibs="$deplibs"
- deplibs=
- fi
-
- for deplib in $libs; do
- lib=
- found=no
- case $deplib in
- -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
- if test "$linkmode,$pass" = "prog,link"; then
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
- compiler_flags="$compiler_flags $deplib"
- if test "$linkmode" = lib ; then
- case "$new_inherited_linker_flags " in
- *" $deplib "*) ;;
- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
- esac
- fi
- fi
- continue
- ;;
- -l*)
- if test "$linkmode" != lib && test "$linkmode" != prog; then
- func_warning "\`-l' is ignored for archives/objects"
- continue
- fi
- func_stripname '-l' '' "$deplib"
- name=$func_stripname_result
- if test "$linkmode" = lib; then
- searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
- else
- searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
- fi
- for searchdir in $searchdirs; do
- for search_ext in .la $std_shrext .so .a; do
- # Search the libtool library
- lib="$searchdir/lib${name}${search_ext}"
- if test -f "$lib"; then
- if test "$search_ext" = ".la"; then
- found=yes
- else
- found=no
- fi
- break 2
- fi
- done
- done
- if test "$found" != yes; then
- # deplib doesn't seem to be a libtool library
- if test "$linkmode,$pass" = "prog,link"; then
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
- deplibs="$deplib $deplibs"
- test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
- fi
- continue
- else # deplib is a libtool library
- # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
- # We need to do some special things here, and not later.
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $deplib "*)
- if func_lalib_p "$lib"; then
- library_names=
- old_library=
- func_source "$lib"
- for l in $old_library $library_names; do
- ll="$l"
- done
- if test "X$ll" = "X$old_library" ; then # only static version available
- found=no
- func_dirname "$lib" "" "."
- ladir="$func_dirname_result"
- lib=$ladir/$old_library
- if test "$linkmode,$pass" = "prog,link"; then
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
- deplibs="$deplib $deplibs"
- test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
- fi
- continue
- fi
- fi
- ;;
- *) ;;
- esac
- fi
- fi
- ;; # -l
- *.ltframework)
- if test "$linkmode,$pass" = "prog,link"; then
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
- deplibs="$deplib $deplibs"
- if test "$linkmode" = lib ; then
- case "$new_inherited_linker_flags " in
- *" $deplib "*) ;;
- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
- esac
- fi
- fi
- continue
- ;;
- -L*)
- case $linkmode in
- lib)
- deplibs="$deplib $deplibs"
- test "$pass" = conv && continue
- newdependency_libs="$deplib $newdependency_libs"
- func_stripname '-L' '' "$deplib"
- newlib_search_path="$newlib_search_path $func_stripname_result"
- ;;
- prog)
- if test "$pass" = conv; then
- deplibs="$deplib $deplibs"
- continue
- fi
- if test "$pass" = scan; then
- deplibs="$deplib $deplibs"
- else
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- fi
- func_stripname '-L' '' "$deplib"
- newlib_search_path="$newlib_search_path $func_stripname_result"
- ;;
- *)
- func_warning "\`-L' is ignored for archives/objects"
- ;;
- esac # linkmode
- continue
- ;; # -L
- -R*)
- if test "$pass" = link; then
- func_stripname '-R' '' "$deplib"
- dir=$func_stripname_result
- # Make sure the xrpath contains only unique directories.
- case "$xrpath " in
- *" $dir "*) ;;
- *) xrpath="$xrpath $dir" ;;
- esac
- fi
- deplibs="$deplib $deplibs"
- continue
- ;;
- *.la) lib="$deplib" ;;
- *.$libext)
- if test "$pass" = conv; then
- deplibs="$deplib $deplibs"
- continue
- fi
- case $linkmode in
- lib)
- # Linking convenience modules into shared libraries is allowed,
- # but linking other static libraries is non-portable.
- case " $dlpreconveniencelibs " in
- *" $deplib "*) ;;
- *)
- valid_a_lib=no
- case $deplibs_check_method in
- match_pattern*)
- set dummy $deplibs_check_method; shift
- match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
- if eval "\$ECHO \"X$deplib\"" 2>/dev/null | $Xsed -e 10q \
- | $EGREP "$match_pattern_regex" > /dev/null; then
- valid_a_lib=yes
- fi
- ;;
- pass_all)
- valid_a_lib=yes
- ;;
- esac
- if test "$valid_a_lib" != yes; then
- $ECHO
- $ECHO "*** Warning: Trying to link with static lib archive $deplib."
- $ECHO "*** I have the capability to make that library automatically link in when"
- $ECHO "*** you link to this library. But I can only do this if you have a"
- $ECHO "*** shared version of the library, which you do not appear to have"
- $ECHO "*** because the file extensions .$libext of this argument makes me believe"
- $ECHO "*** that it is just a static archive that I should not use here."
- else
- $ECHO
- $ECHO "*** Warning: Linking the shared library $output against the"
- $ECHO "*** static library $deplib is not portable!"
- deplibs="$deplib $deplibs"
- fi
- ;;
- esac
- continue
- ;;
- prog)
- if test "$pass" != link; then
- deplibs="$deplib $deplibs"
- else
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- fi
- continue
- ;;
- esac # linkmode
- ;; # *.$libext
- *.lo | *.$objext)
- if test "$pass" = conv; then
- deplibs="$deplib $deplibs"
- elif test "$linkmode" = prog; then
- if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
- # If there is no dlopen support or we're linking statically,
- # we need to preload.
- newdlprefiles="$newdlprefiles $deplib"
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
- newdlfiles="$newdlfiles $deplib"
- fi
- fi
- continue
- ;;
- %DEPLIBS%)
- alldeplibs=yes
- continue
- ;;
- esac # case $deplib
-
- if test "$found" = yes || test -f "$lib"; then :
- else
- func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
- fi
-
- # Check to see that this really is a libtool archive.
- func_lalib_unsafe_p "$lib" \
- || func_fatal_error "\`$lib' is not a valid libtool archive"
-
- func_dirname "$lib" "" "."
- ladir="$func_dirname_result"
-
- dlname=
- dlopen=
- dlpreopen=
- libdir=
- library_names=
- old_library=
- inherited_linker_flags=
- # If the library was installed with an old release of libtool,
- # it will not redefine variables installed, or shouldnotlink
- installed=yes
- shouldnotlink=no
- avoidtemprpath=
-
-
- # Read the .la file
- func_source "$lib"
-
- # Convert "-framework foo" to "foo.ltframework"
- if test -n "$inherited_linker_flags"; then
- tmp_inherited_linker_flags=`$ECHO "X$inherited_linker_flags" | $Xsed -e 's/-framework \([^ $]*\)/\1.ltframework/g'`
- for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
- case " $new_inherited_linker_flags " in
- *" $tmp_inherited_linker_flag "*) ;;
- *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";;
- esac
- done
- fi
- dependency_libs=`$ECHO "X $dependency_libs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- if test "$linkmode,$pass" = "lib,link" ||
- test "$linkmode,$pass" = "prog,scan" ||
- { test "$linkmode" != prog && test "$linkmode" != lib; }; then
- test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
- test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
- fi
-
- if test "$pass" = conv; then
- # Only check for convenience libraries
- deplibs="$lib $deplibs"
- if test -z "$libdir"; then
- if test -z "$old_library"; then
- func_fatal_error "cannot find name of link library for \`$lib'"
- fi
- # It is a libtool convenience library, so add in its objects.
- convenience="$convenience $ladir/$objdir/$old_library"
- old_convenience="$old_convenience $ladir/$objdir/$old_library"
- elif test "$linkmode" != prog && test "$linkmode" != lib; then
- func_fatal_error "\`$lib' is not a convenience library"
- fi
- tmp_libs=
- for deplib in $dependency_libs; do
- deplibs="$deplib $deplibs"
- if $opt_duplicate_deps ; then
- case "$tmp_libs " in
- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
- esac
- fi
- tmp_libs="$tmp_libs $deplib"
- done
- continue
- fi # $pass = conv
-
-
- # Get the name of the library we link against.
- linklib=
- for l in $old_library $library_names; do
- linklib="$l"
- done
- if test -z "$linklib"; then
- func_fatal_error "cannot find name of link library for \`$lib'"
- fi
-
- # This library was specified with -dlopen.
- if test "$pass" = dlopen; then
- if test -z "$libdir"; then
- func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
- fi
- if test -z "$dlname" ||
- test "$dlopen_support" != yes ||
- test "$build_libtool_libs" = no; then
- # If there is no dlname, no dlopen support or we're linking
- # statically, we need to preload. We also need to preload any
- # dependent libraries so libltdl's deplib preloader doesn't
- # bomb out in the load deplibs phase.
- dlprefiles="$dlprefiles $lib $dependency_libs"
- else
- newdlfiles="$newdlfiles $lib"
- fi
- continue
- fi # $pass = dlopen
-
- # We need an absolute path.
- case $ladir in
- [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
- *)
- abs_ladir=`cd "$ladir" && pwd`
- if test -z "$abs_ladir"; then
- func_warning "cannot determine absolute directory name of \`$ladir'"
- func_warning "passing it literally to the linker, although it might fail"
- abs_ladir="$ladir"
- fi
- ;;
- esac
- func_basename "$lib"
- laname="$func_basename_result"
-
- # Find the relevant object directory and library name.
- if test "X$installed" = Xyes; then
- if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
- func_warning "library \`$lib' was moved."
- dir="$ladir"
- absdir="$abs_ladir"
- libdir="$abs_ladir"
- else
- dir="$libdir"
- absdir="$libdir"
- fi
- test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
- else
- if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
- dir="$ladir"
- absdir="$abs_ladir"
- # Remove this search path later
- notinst_path="$notinst_path $abs_ladir"
- else
- dir="$ladir/$objdir"
- absdir="$abs_ladir/$objdir"
- # Remove this search path later
- notinst_path="$notinst_path $abs_ladir"
- fi
- fi # $installed = yes
- func_stripname 'lib' '.la' "$laname"
- name=$func_stripname_result
-
- # This library was specified with -dlpreopen.
- if test "$pass" = dlpreopen; then
- if test -z "$libdir" && test "$linkmode" = prog; then
- func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
- fi
- # Prefer using a static library (so that no silly _DYNAMIC symbols
- # are required to link).
- if test -n "$old_library"; then
- newdlprefiles="$newdlprefiles $dir/$old_library"
- # Keep a list of preopened convenience libraries to check
- # that they are being used correctly in the link pass.
- test -z "$libdir" && \
- dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library"
- # Otherwise, use the dlname, so that lt_dlopen finds it.
- elif test -n "$dlname"; then
- newdlprefiles="$newdlprefiles $dir/$dlname"
- else
- newdlprefiles="$newdlprefiles $dir/$linklib"
- fi
- fi # $pass = dlpreopen
-
- if test -z "$libdir"; then
- # Link the convenience library
- if test "$linkmode" = lib; then
- deplibs="$dir/$old_library $deplibs"
- elif test "$linkmode,$pass" = "prog,link"; then
- compile_deplibs="$dir/$old_library $compile_deplibs"
- finalize_deplibs="$dir/$old_library $finalize_deplibs"
- else
- deplibs="$lib $deplibs" # used for prog,scan pass
- fi
- continue
- fi
-
-
- if test "$linkmode" = prog && test "$pass" != link; then
- newlib_search_path="$newlib_search_path $ladir"
- deplibs="$lib $deplibs"
-
- linkalldeplibs=no
- if test "$link_all_deplibs" != no || test -z "$library_names" ||
- test "$build_libtool_libs" = no; then
- linkalldeplibs=yes
- fi
-
- tmp_libs=
- for deplib in $dependency_libs; do
- case $deplib in
- -L*) func_stripname '-L' '' "$deplib"
- newlib_search_path="$newlib_search_path $func_stripname_result"
- ;;
- esac
- # Need to link against all dependency_libs?
- if test "$linkalldeplibs" = yes; then
- deplibs="$deplib $deplibs"
- else
- # Need to hardcode shared library paths
- # or/and link against static libraries
- newdependency_libs="$deplib $newdependency_libs"
- fi
- if $opt_duplicate_deps ; then
- case "$tmp_libs " in
- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
- esac
- fi
- tmp_libs="$tmp_libs $deplib"
- done # for deplib
- continue
- fi # $linkmode = prog...
-
- if test "$linkmode,$pass" = "prog,link"; then
- if test -n "$library_names" &&
- { { test "$prefer_static_libs" = no ||
- test "$prefer_static_libs,$installed" = "built,yes"; } ||
- test -z "$old_library"; }; then
- # We need to hardcode the library path
- if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
- # Make sure the rpath contains only unique directories.
- case "$temp_rpath:" in
- *"$absdir:"*) ;;
- *) temp_rpath="$temp_rpath$absdir:" ;;
- esac
- fi
-
- # Hardcode the library path.
- # Skip directories that are in the system default run-time
- # search path.
- case " $sys_lib_dlsearch_path " in
- *" $absdir "*) ;;
- *)
- case "$compile_rpath " in
- *" $absdir "*) ;;
- *) compile_rpath="$compile_rpath $absdir"
- esac
- ;;
- esac
- case " $sys_lib_dlsearch_path " in
- *" $libdir "*) ;;
- *)
- case "$finalize_rpath " in
- *" $libdir "*) ;;
- *) finalize_rpath="$finalize_rpath $libdir"
- esac
- ;;
- esac
- fi # $linkmode,$pass = prog,link...
-
- if test "$alldeplibs" = yes &&
- { test "$deplibs_check_method" = pass_all ||
- { test "$build_libtool_libs" = yes &&
- test -n "$library_names"; }; }; then
- # We only need to search for static libraries
- continue
- fi
- fi
-
- link_static=no # Whether the deplib will be linked statically
- use_static_libs=$prefer_static_libs
- if test "$use_static_libs" = built && test "$installed" = yes; then
- use_static_libs=no
- fi
- if test -n "$library_names" &&
- { test "$use_static_libs" = no || test -z "$old_library"; }; then
- case $host in
- *cygwin* | *mingw* | *cegcc*)
- # No point in relinking DLLs because paths are not encoded
- notinst_deplibs="$notinst_deplibs $lib"
- need_relink=no
- ;;
- *)
- if test "$installed" = no; then
- notinst_deplibs="$notinst_deplibs $lib"
- need_relink=yes
- fi
- ;;
- esac
- # This is a shared library
-
- # Warn about portability, can't link against -module's on some
- # systems (darwin). Don't bleat about dlopened modules though!
- dlopenmodule=""
- for dlpremoduletest in $dlprefiles; do
- if test "X$dlpremoduletest" = "X$lib"; then
- dlopenmodule="$dlpremoduletest"
- break
- fi
- done
- if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
- $ECHO
- if test "$linkmode" = prog; then
- $ECHO "*** Warning: Linking the executable $output against the loadable module"
- else
- $ECHO "*** Warning: Linking the shared library $output against the loadable module"
- fi
- $ECHO "*** $linklib is not portable!"
- fi
- if test "$linkmode" = lib &&
- test "$hardcode_into_libs" = yes; then
- # Hardcode the library path.
- # Skip directories that are in the system default run-time
- # search path.
- case " $sys_lib_dlsearch_path " in
- *" $absdir "*) ;;
- *)
- case "$compile_rpath " in
- *" $absdir "*) ;;
- *) compile_rpath="$compile_rpath $absdir"
- esac
- ;;
- esac
- case " $sys_lib_dlsearch_path " in
- *" $libdir "*) ;;
- *)
- case "$finalize_rpath " in
- *" $libdir "*) ;;
- *) finalize_rpath="$finalize_rpath $libdir"
- esac
- ;;
- esac
- fi
-
- if test -n "$old_archive_from_expsyms_cmds"; then
- # figure out the soname
- set dummy $library_names
- shift
- realname="$1"
- shift
- libname=`eval "\\$ECHO \"$libname_spec\""`
- # use dlname if we got it. it's perfectly good, no?
- if test -n "$dlname"; then
- soname="$dlname"
- elif test -n "$soname_spec"; then
- # bleh windows
- case $host in
- *cygwin* | mingw* | *cegcc*)
- func_arith $current - $age
- major=$func_arith_result
- versuffix="-$major"
- ;;
- esac
- eval soname=\"$soname_spec\"
- else
- soname="$realname"
- fi
-
- # Make a new name for the extract_expsyms_cmds to use
- soroot="$soname"
- func_basename "$soroot"
- soname="$func_basename_result"
- func_stripname 'lib' '.dll' "$soname"
- newlib=libimp-$func_stripname_result.a
-
- # If the library has no export list, then create one now
- if test -f "$output_objdir/$soname-def"; then :
- else
- func_verbose "extracting exported symbol list from \`$soname'"
- func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
- fi
-
- # Create $newlib
- if test -f "$output_objdir/$newlib"; then :; else
- func_verbose "generating import library for \`$soname'"
- func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
- fi
- # make sure the library variables are pointing to the new library
- dir=$output_objdir
- linklib=$newlib
- fi # test -n "$old_archive_from_expsyms_cmds"
-
- if test "$linkmode" = prog || test "$mode" != relink; then
- add_shlibpath=
- add_dir=
- add=
- lib_linked=yes
- case $hardcode_action in
- immediate | unsupported)
- if test "$hardcode_direct" = no; then
- add="$dir/$linklib"
- case $host in
- *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
- *-*-sysv4*uw2*) add_dir="-L$dir" ;;
- *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
- *-*-unixware7*) add_dir="-L$dir" ;;
- *-*-darwin* )
- # if the lib is a (non-dlopened) module then we can not
- # link against it, someone is ignoring the earlier warnings
- if /usr/bin/file -L $add 2> /dev/null |
- $GREP ": [^:]* bundle" >/dev/null ; then
- if test "X$dlopenmodule" != "X$lib"; then
- $ECHO "*** Warning: lib $linklib is a module, not a shared library"
- if test -z "$old_library" ; then
- $ECHO
- $ECHO "*** And there doesn't seem to be a static archive available"
- $ECHO "*** The link will probably fail, sorry"
- else
- add="$dir/$old_library"
- fi
- elif test -n "$old_library"; then
- add="$dir/$old_library"
- fi
- fi
- esac
- elif test "$hardcode_minus_L" = no; then
- case $host in
- *-*-sunos*) add_shlibpath="$dir" ;;
- esac
- add_dir="-L$dir"
- add="-l$name"
- elif test "$hardcode_shlibpath_var" = no; then
- add_shlibpath="$dir"
- add="-l$name"
- else
- lib_linked=no
- fi
- ;;
- relink)
- if test "$hardcode_direct" = yes &&
- test "$hardcode_direct_absolute" = no; then
- add="$dir/$linklib"
- elif test "$hardcode_minus_L" = yes; then
- add_dir="-L$dir"
- # Try looking first in the location we're being installed to.
- if test -n "$inst_prefix_dir"; then
- case $libdir in
- [\\/]*)
- add_dir="$add_dir -L$inst_prefix_dir$libdir"
- ;;
- esac
- fi
- add="-l$name"
- elif test "$hardcode_shlibpath_var" = yes; then
- add_shlibpath="$dir"
- add="-l$name"
- else
- lib_linked=no
- fi
- ;;
- *) lib_linked=no ;;
- esac
-
- if test "$lib_linked" != yes; then
- func_fatal_configuration "unsupported hardcode properties"
- fi
-
- if test -n "$add_shlibpath"; then
- case :$compile_shlibpath: in
- *":$add_shlibpath:"*) ;;
- *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
- esac
- fi
- if test "$linkmode" = prog; then
- test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
- test -n "$add" && compile_deplibs="$add $compile_deplibs"
- else
- test -n "$add_dir" && deplibs="$add_dir $deplibs"
- test -n "$add" && deplibs="$add $deplibs"
- if test "$hardcode_direct" != yes &&
- test "$hardcode_minus_L" != yes &&
- test "$hardcode_shlibpath_var" = yes; then
- case :$finalize_shlibpath: in
- *":$libdir:"*) ;;
- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
- esac
- fi
- fi
- fi
-
- if test "$linkmode" = prog || test "$mode" = relink; then
- add_shlibpath=
- add_dir=
- add=
- # Finalize command for both is simple: just hardcode it.
- if test "$hardcode_direct" = yes &&
- test "$hardcode_direct_absolute" = no; then
- add="$libdir/$linklib"
- elif test "$hardcode_minus_L" = yes; then
- add_dir="-L$libdir"
- add="-l$name"
- elif test "$hardcode_shlibpath_var" = yes; then
- case :$finalize_shlibpath: in
- *":$libdir:"*) ;;
- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
- esac
- add="-l$name"
- elif test "$hardcode_automatic" = yes; then
- if test -n "$inst_prefix_dir" &&
- test -f "$inst_prefix_dir$libdir/$linklib" ; then
- add="$inst_prefix_dir$libdir/$linklib"
- else
- add="$libdir/$linklib"
- fi
- else
- # We cannot seem to hardcode it, guess we'll fake it.
- add_dir="-L$libdir"
- # Try looking first in the location we're being installed to.
- if test -n "$inst_prefix_dir"; then
- case $libdir in
- [\\/]*)
- add_dir="$add_dir -L$inst_prefix_dir$libdir"
- ;;
- esac
- fi
- add="-l$name"
- fi
-
- if test "$linkmode" = prog; then
- test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
- test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
- else
- test -n "$add_dir" && deplibs="$add_dir $deplibs"
- test -n "$add" && deplibs="$add $deplibs"
- fi
- fi
- elif test "$linkmode" = prog; then
- # Here we assume that one of hardcode_direct or hardcode_minus_L
- # is not unsupported. This is valid on all known static and
- # shared platforms.
- if test "$hardcode_direct" != unsupported; then
- test -n "$old_library" && linklib="$old_library"
- compile_deplibs="$dir/$linklib $compile_deplibs"
- finalize_deplibs="$dir/$linklib $finalize_deplibs"
- else
- compile_deplibs="-l$name -L$dir $compile_deplibs"
- finalize_deplibs="-l$name -L$dir $finalize_deplibs"
- fi
- elif test "$build_libtool_libs" = yes; then
- # Not a shared library
- if test "$deplibs_check_method" != pass_all; then
- # We're trying to link a shared library against a static one
- # but the system doesn't support it.
-
- # Just print a warning and add the library to dependency_libs so
- # that the program can be linked against the static library.
- $ECHO
- $ECHO "*** Warning: This system can not link to static lib archive $lib."
- $ECHO "*** I have the capability to make that library automatically link in when"
- $ECHO "*** you link to this library. But I can only do this if you have a"
- $ECHO "*** shared version of the library, which you do not appear to have."
- if test "$module" = yes; then
- $ECHO "*** But as you try to build a module library, libtool will still create "
- $ECHO "*** a static module, that should work as long as the dlopening application"
- $ECHO "*** is linked with the -dlopen flag to resolve symbols at runtime."
- if test -z "$global_symbol_pipe"; then
- $ECHO
- $ECHO "*** However, this would only work if libtool was able to extract symbol"
- $ECHO "*** lists from a program, using \`nm' or equivalent, but libtool could"
- $ECHO "*** not find such a program. So, this module is probably useless."
- $ECHO "*** \`nm' from GNU binutils and a full rebuild may help."
- fi
- if test "$build_old_libs" = no; then
- build_libtool_libs=module
- build_old_libs=yes
- else
- build_libtool_libs=no
- fi
- fi
- else
- deplibs="$dir/$old_library $deplibs"
- link_static=yes
- fi
- fi # link shared/static library?
-
- if test "$linkmode" = lib; then
- if test -n "$dependency_libs" &&
- { test "$hardcode_into_libs" != yes ||
- test "$build_old_libs" = yes ||
- test "$link_static" = yes; }; then
- # Extract -R from dependency_libs
- temp_deplibs=
- for libdir in $dependency_libs; do
- case $libdir in
- -R*) func_stripname '-R' '' "$libdir"
- temp_xrpath=$func_stripname_result
- case " $xrpath " in
- *" $temp_xrpath "*) ;;
- *) xrpath="$xrpath $temp_xrpath";;
- esac;;
- *) temp_deplibs="$temp_deplibs $libdir";;
- esac
- done
- dependency_libs="$temp_deplibs"
- fi
-
- newlib_search_path="$newlib_search_path $absdir"
- # Link against this library
- test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
- # ... and its dependency_libs
- tmp_libs=
- for deplib in $dependency_libs; do
- newdependency_libs="$deplib $newdependency_libs"
- if $opt_duplicate_deps ; then
- case "$tmp_libs " in
- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
- esac
- fi
- tmp_libs="$tmp_libs $deplib"
- done
-
- if test "$link_all_deplibs" != no; then
- # Add the search paths of all dependency libraries
- for deplib in $dependency_libs; do
- case $deplib in
- -L*) path="$deplib" ;;
- *.la)
- func_dirname "$deplib" "" "."
- dir="$func_dirname_result"
- # We need an absolute path.
- case $dir in
- [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
- *)
- absdir=`cd "$dir" && pwd`
- if test -z "$absdir"; then
- func_warning "cannot determine absolute directory name of \`$dir'"
- absdir="$dir"
- fi
- ;;
- esac
- if $GREP "^installed=no" $deplib > /dev/null; then
- case $host in
- *-*-darwin*)
- depdepl=
- eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
- if test -n "$deplibrary_names" ; then
- for tmp in $deplibrary_names ; do
- depdepl=$tmp
- done
- if test -f "$absdir/$objdir/$depdepl" ; then
- depdepl="$absdir/$objdir/$depdepl"
- darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
- if test -z "$darwin_install_name"; then
- darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
- fi
- compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
- linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}"
- path=
- fi
- fi
- ;;
- *)
- path="-L$absdir/$objdir"
- ;;
- esac
- else
- eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
- test -z "$libdir" && \
- func_fatal_error "\`$deplib' is not a valid libtool archive"
- test "$absdir" != "$libdir" && \
- func_warning "\`$deplib' seems to be moved"
-
- path="-L$absdir"
- fi
- ;;
- esac
- case " $deplibs " in
- *" $path "*) ;;
- *) deplibs="$path $deplibs" ;;
- esac
- done
- fi # link_all_deplibs != no
- fi # linkmode = lib
- done # for deplib in $libs
- if test "$pass" = link; then
- if test "$linkmode" = "prog"; then
- compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
- finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
- else
- compiler_flags="$compiler_flags "`$ECHO "X $new_inherited_linker_flags" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- fi
- fi
- dependency_libs="$newdependency_libs"
- if test "$pass" = dlpreopen; then
- # Link the dlpreopened libraries before other libraries
- for deplib in $save_deplibs; do
- deplibs="$deplib $deplibs"
- done
- fi
- if test "$pass" != dlopen; then
- if test "$pass" != conv; then
- # Make sure lib_search_path contains only unique directories.
- lib_search_path=
- for dir in $newlib_search_path; do
- case "$lib_search_path " in
- *" $dir "*) ;;
- *) lib_search_path="$lib_search_path $dir" ;;
- esac
- done
- newlib_search_path=
- fi
-
- if test "$linkmode,$pass" != "prog,link"; then
- vars="deplibs"
- else
- vars="compile_deplibs finalize_deplibs"
- fi
- for var in $vars dependency_libs; do
- # Add libraries to $var in reverse order
- eval tmp_libs=\"\$$var\"
- new_libs=
- for deplib in $tmp_libs; do
- # FIXME: Pedantically, this is the right thing to do, so
- # that some nasty dependency loop isn't accidentally
- # broken:
- #new_libs="$deplib $new_libs"
- # Pragmatically, this seems to cause very few problems in
- # practice:
- case $deplib in
- -L*) new_libs="$deplib $new_libs" ;;
- -R*) ;;
- *)
- # And here is the reason: when a library appears more
- # than once as an explicit dependence of a library, or
- # is implicitly linked in more than once by the
- # compiler, it is considered special, and multiple
- # occurrences thereof are not removed. Compare this
- # with having the same library being listed as a
- # dependency of multiple other libraries: in this case,
- # we know (pedantically, we assume) the library does not
- # need to be listed more than once, so we keep only the
- # last copy. This is not always right, but it is rare
- # enough that we require users that really mean to play
- # such unportable linking tricks to link the library
- # using -Wl,-lname, so that libtool does not consider it
- # for duplicate removal.
- case " $specialdeplibs " in
- *" $deplib "*) new_libs="$deplib $new_libs" ;;
- *)
- case " $new_libs " in
- *" $deplib "*) ;;
- *) new_libs="$deplib $new_libs" ;;
- esac
- ;;
- esac
- ;;
- esac
- done
- tmp_libs=
- for deplib in $new_libs; do
- case $deplib in
- -L*)
- case " $tmp_libs " in
- *" $deplib "*) ;;
- *) tmp_libs="$tmp_libs $deplib" ;;
- esac
- ;;
- *) tmp_libs="$tmp_libs $deplib" ;;
- esac
- done
- eval $var=\"$tmp_libs\"
- done # for var
- fi
- # Last step: remove runtime libs from dependency_libs
- # (they stay in deplibs)
- tmp_libs=
- for i in $dependency_libs ; do
- case " $predeps $postdeps $compiler_lib_search_path " in
- *" $i "*)
- i=""
- ;;
- esac
- if test -n "$i" ; then
- tmp_libs="$tmp_libs $i"
- fi
- done
- dependency_libs=$tmp_libs
- done # for pass
- if test "$linkmode" = prog; then
- dlfiles="$newdlfiles"
- fi
- if test "$linkmode" = prog || test "$linkmode" = lib; then
- dlprefiles="$newdlprefiles"
- fi
-
- case $linkmode in
- oldlib)
- if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
- func_warning "\`-dlopen' is ignored for archives"
- fi
-
- case " $deplibs" in
- *\ -l* | *\ -L*)
- func_warning "\`-l' and \`-L' are ignored for archives" ;;
- esac
-
- test -n "$rpath" && \
- func_warning "\`-rpath' is ignored for archives"
-
- test -n "$xrpath" && \
- func_warning "\`-R' is ignored for archives"
-
- test -n "$vinfo" && \
- func_warning "\`-version-info/-version-number' is ignored for archives"
-
- test -n "$release" && \
- func_warning "\`-release' is ignored for archives"
-
- test -n "$export_symbols$export_symbols_regex" && \
- func_warning "\`-export-symbols' is ignored for archives"
-
- # Now set the variables for building old libraries.
- build_libtool_libs=no
- oldlibs="$output"
- objs="$objs$old_deplibs"
- ;;
-
- lib)
- # Make sure we only generate libraries of the form `libNAME.la'.
- case $outputname in
- lib*)
- func_stripname 'lib' '.la' "$outputname"
- name=$func_stripname_result
- eval shared_ext=\"$shrext_cmds\"
- eval libname=\"$libname_spec\"
- ;;
- *)
- test "$module" = no && \
- func_fatal_help "libtool library \`$output' must begin with \`lib'"
-
- if test "$need_lib_prefix" != no; then
- # Add the "lib" prefix for modules if required
- func_stripname '' '.la' "$outputname"
- name=$func_stripname_result
- eval shared_ext=\"$shrext_cmds\"
- eval libname=\"$libname_spec\"
- else
- func_stripname '' '.la' "$outputname"
- libname=$func_stripname_result
- fi
- ;;
- esac
-
- if test -n "$objs"; then
- if test "$deplibs_check_method" != pass_all; then
- func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
- else
- $ECHO
- $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
- $ECHO "*** objects $objs is not portable!"
- libobjs="$libobjs $objs"
- fi
- fi
-
- test "$dlself" != no && \
- func_warning "\`-dlopen self' is ignored for libtool libraries"
-
- set dummy $rpath
- shift
- test "$#" -gt 1 && \
- func_warning "ignoring multiple \`-rpath's for a libtool library"
-
- install_libdir="$1"
-
- oldlibs=
- if test -z "$rpath"; then
- if test "$build_libtool_libs" = yes; then
- # Building a libtool convenience library.
-	  # Some compilers have problems with a `.al' extension so
-	  # convenience libraries should have the same extension that an
-	  # archive normally would.
- oldlibs="$output_objdir/$libname.$libext $oldlibs"
- build_libtool_libs=convenience
- build_old_libs=yes
- fi
-
- test -n "$vinfo" && \
- func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
-
- test -n "$release" && \
- func_warning "\`-release' is ignored for convenience libraries"
- else
-
- # Parse the version information argument.
- save_ifs="$IFS"; IFS=':'
- set dummy $vinfo 0 0 0
- shift
- IFS="$save_ifs"
-
- test -n "$7" && \
- func_fatal_help "too many parameters to \`-version-info'"
-
- # convert absolute version numbers to libtool ages
- # this retains compatibility with .la files and attempts
- # to make the code below a bit more comprehensible
-
- case $vinfo_number in
- yes)
- number_major="$1"
- number_minor="$2"
- number_revision="$3"
- #
- # There are really only two kinds -- those that
- # use the current revision as the major version
- # and those that subtract age and use age as
-	    # a minor version. But then there is irix,
- # which has an extra 1 added just for fun
- #
- case $version_type in
- darwin|linux|osf|windows|none)
- func_arith $number_major + $number_minor
- current=$func_arith_result
- age="$number_minor"
- revision="$number_revision"
- ;;
- freebsd-aout|freebsd-elf|sunos)
- current="$number_major"
- revision="$number_minor"
- age="0"
- ;;
- irix|nonstopux)
- func_arith $number_major + $number_minor
- current=$func_arith_result
- age="$number_minor"
- revision="$number_minor"
- lt_irix_increment=no
- ;;
- esac
- ;;
- no)
- current="$1"
- revision="$2"
- age="$3"
- ;;
- esac
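A minimal sketch of the -version-number to current:revision:age mapping in the darwin/linux/osf/windows branch above, assuming an input of 3:2:1 and plain shell arithmetic in place of func_arith:

#!/bin/sh
# Assumed input: -version-number MAJOR:MINOR:REVISION = 3:2:1
number_major=3 number_minor=2 number_revision=1
current=$((number_major + number_minor))   # 5
age=$number_minor                          # 2
revision=$number_revision                  # 1
# For the linux scheme the file suffix becomes (current-age).age.revision:
printf 'libNAME.so.%s.%s.%s\n' $((current - age)) "$age" "$revision"   # libNAME.so.3.2.1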
-
- # Check that each of the things are valid numbers.
- case $current in
- 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
- *)
- func_error "CURRENT \`$current' must be a nonnegative integer"
- func_fatal_error "\`$vinfo' is not valid version information"
- ;;
- esac
-
- case $revision in
- 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
- *)
- func_error "REVISION \`$revision' must be a nonnegative integer"
- func_fatal_error "\`$vinfo' is not valid version information"
- ;;
- esac
-
- case $age in
- 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
- *)
- func_error "AGE \`$age' must be a nonnegative integer"
- func_fatal_error "\`$vinfo' is not valid version information"
- ;;
- esac
-
- if test "$age" -gt "$current"; then
- func_error "AGE \`$age' is greater than the current interface number \`$current'"
- func_fatal_error "\`$vinfo' is not valid version information"
- fi
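The validity checks above rely only on shell case patterns; a standalone version, with a hypothetical helper name, could look like this:

#!/bin/sh
# Accept 0 or a 1-5 digit number without a leading zero; reject anything else.
is_version_part ()
{
  case $1 in
    0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) return 0 ;;
    *) return 1 ;;
  esac
}
is_version_part 42 && echo "42 accepted"
is_version_part 3a || echo "3a rejected"
is_version_part -1 || echo "-1 rejected"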
-
- # Calculate the version variables.
- major=
- versuffix=
- verstring=
- case $version_type in
- none) ;;
-
- darwin)
- # Like Linux, but with the current version available in
- # verstring for coding it into the library header
- func_arith $current - $age
- major=.$func_arith_result
- versuffix="$major.$age.$revision"
- # Darwin ld doesn't like 0 for these options...
- func_arith $current + 1
- minor_current=$func_arith_result
- xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
- verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
- ;;
-
- freebsd-aout)
- major=".$current"
- versuffix=".$current.$revision";
- ;;
-
- freebsd-elf)
- major=".$current"
- versuffix=".$current"
- ;;
-
- irix | nonstopux)
- if test "X$lt_irix_increment" = "Xno"; then
- func_arith $current - $age
- else
- func_arith $current - $age + 1
- fi
- major=$func_arith_result
-
- case $version_type in
- nonstopux) verstring_prefix=nonstopux ;;
- *) verstring_prefix=sgi ;;
- esac
- verstring="$verstring_prefix$major.$revision"
-
- # Add in all the interfaces that we are compatible with.
- loop=$revision
- while test "$loop" -ne 0; do
- func_arith $revision - $loop
- iface=$func_arith_result
- func_arith $loop - 1
- loop=$func_arith_result
- verstring="$verstring_prefix$major.$iface:$verstring"
- done
-
- # Before this point, $major must not contain `.'.
- major=.$major
- versuffix="$major.$revision"
- ;;
-
- linux)
- func_arith $current - $age
- major=.$func_arith_result
- versuffix="$major.$age.$revision"
- ;;
-
- osf)
- func_arith $current - $age
- major=.$func_arith_result
- versuffix=".$current.$age.$revision"
- verstring="$current.$age.$revision"
-
- # Add in all the interfaces that we are compatible with.
- loop=$age
- while test "$loop" -ne 0; do
- func_arith $current - $loop
- iface=$func_arith_result
- func_arith $loop - 1
- loop=$func_arith_result
- verstring="$verstring:${iface}.0"
- done
-
- # Make executables depend on our current version.
- verstring="$verstring:${current}.0"
- ;;
-
- qnx)
- major=".$current"
- versuffix=".$current"
- ;;
-
- sunos)
- major=".$current"
- versuffix=".$current.$revision"
- ;;
-
- windows)
- # Use '-' rather than '.', since we only want one
- # extension on DOS 8.3 filesystems.
- func_arith $current - $age
- major=$func_arith_result
- versuffix="-$major"
- ;;
-
- *)
- func_fatal_configuration "unknown library version type \`$version_type'"
- ;;
- esac
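A reduced sketch of the osf-style "add in all the interfaces that we are compatible with" loop above, using plain arithmetic instead of func_arith and example numbers:

#!/bin/sh
current=5 age=2 revision=1
verstring="$current.$age.$revision"
loop=$age
while [ "$loop" -ne 0 ]; do
  iface=$((current - loop))
  loop=$((loop - 1))
  verstring="$verstring:${iface}.0"
done
verstring="$verstring:${current}.0"    # executables depend on the current version too
printf '%s\n' "$verstring"             # 5.2.1:3.0:4.0:5.0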
-
- # Clear the version info if we defaulted, and they specified a release.
- if test -z "$vinfo" && test -n "$release"; then
- major=
- case $version_type in
- darwin)
- # we can't check for "0.0" in archive_cmds due to quoting
- # problems, so we reset it completely
- verstring=
- ;;
- *)
- verstring="0.0"
- ;;
- esac
- if test "$need_version" = no; then
- versuffix=
- else
- versuffix=".0.0"
- fi
- fi
-
- # Remove version info from name if versioning should be avoided
- if test "$avoid_version" = yes && test "$need_version" = no; then
- major=
- versuffix=
- verstring=""
- fi
-
- # Check to see if the archive will have undefined symbols.
- if test "$allow_undefined" = yes; then
- if test "$allow_undefined_flag" = unsupported; then
- func_warning "undefined symbols not allowed in $host shared libraries"
- build_libtool_libs=no
- build_old_libs=yes
- fi
- else
- # Don't allow undefined symbols.
- allow_undefined_flag="$no_undefined_flag"
- fi
-
- fi
-
- func_generate_dlsyms "$libname" "$libname" "yes"
- libobjs="$libobjs $symfileobj"
- test "X$libobjs" = "X " && libobjs=
-
- if test "$mode" != relink; then
- # Remove our outputs, but don't remove object files since they
- # may have been created when compiling PIC objects.
- removelist=
- tempremovelist=`$ECHO "$output_objdir/*"`
- for p in $tempremovelist; do
- case $p in
- *.$objext | *.gcno)
- ;;
- $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
- if test "X$precious_files_regex" != "X"; then
- if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
- then
- continue
- fi
- fi
- removelist="$removelist $p"
- ;;
- *) ;;
- esac
- done
- test -n "$removelist" && \
- func_show_eval "${RM}r \$removelist"
- fi
-
- # Now set the variables for building old libraries.
- if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
- oldlibs="$oldlibs $output_objdir/$libname.$libext"
-
- # Transform .lo files to .o files.
- oldobjs="$objs "`$ECHO "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP`
- fi
-
- # Eliminate all temporary directories.
- #for path in $notinst_path; do
- # lib_search_path=`$ECHO "X$lib_search_path " | $Xsed -e "s% $path % %g"`
- # deplibs=`$ECHO "X$deplibs " | $Xsed -e "s% -L$path % %g"`
- # dependency_libs=`$ECHO "X$dependency_libs " | $Xsed -e "s% -L$path % %g"`
- #done
-
- if test -n "$xrpath"; then
- # If the user specified any rpath flags, then add them.
- temp_xrpath=
- for libdir in $xrpath; do
- temp_xrpath="$temp_xrpath -R$libdir"
- case "$finalize_rpath " in
- *" $libdir "*) ;;
- *) finalize_rpath="$finalize_rpath $libdir" ;;
- esac
- done
- if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
- dependency_libs="$temp_xrpath $dependency_libs"
- fi
- fi
-
- # Make sure dlfiles contains only unique files that won't be dlpreopened
- old_dlfiles="$dlfiles"
- dlfiles=
- for lib in $old_dlfiles; do
- case " $dlprefiles $dlfiles " in
- *" $lib "*) ;;
- *) dlfiles="$dlfiles $lib" ;;
- esac
- done
-
- # Make sure dlprefiles contains only unique files
- old_dlprefiles="$dlprefiles"
- dlprefiles=
- for lib in $old_dlprefiles; do
- case "$dlprefiles " in
- *" $lib "*) ;;
- *) dlprefiles="$dlprefiles $lib" ;;
- esac
- done
-
- if test "$build_libtool_libs" = yes; then
- if test -n "$rpath"; then
- case $host in
- *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc*)
-	  # these systems don't actually have a C library (as such)!
- ;;
- *-*-rhapsody* | *-*-darwin1.[012])
- # Rhapsody C library is in the System framework
- deplibs="$deplibs System.ltframework"
- ;;
- *-*-netbsd*)
- # Don't link with libc until the a.out ld.so is fixed.
- ;;
- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
- # Do not include libc due to us having libc/libc_r.
- ;;
- *-*-sco3.2v5* | *-*-sco5v6*)
- # Causes problems with __ctype
- ;;
- *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
- # Compiler inserts libc in the correct place for threads to work
- ;;
- *)
- # Add libc to deplibs on all other systems if necessary.
- if test "$build_libtool_need_lc" = "yes"; then
- deplibs="$deplibs -lc"
- fi
- ;;
- esac
- fi
-
- # Transform deplibs into only deplibs that can be linked in shared.
- name_save=$name
- libname_save=$libname
- release_save=$release
- versuffix_save=$versuffix
- major_save=$major
- # I'm not sure if I'm treating the release correctly. I think
-	# release should show up in the -l (i.e., -lgmp5) so we don't want to
- # add it in twice. Is that correct?
- release=""
- versuffix=""
- major=""
- newdeplibs=
- droppeddeps=no
- case $deplibs_check_method in
- pass_all)
- # Don't check for shared/static. Everything works.
- # This might be a little naive. We might want to check
- # whether the library exists or not. But this is on
- # osf3 & osf4 and I'm not really sure... Just
- # implementing what was already the behavior.
- newdeplibs=$deplibs
- ;;
- test_compile)
- # This code stresses the "libraries are programs" paradigm to its
- # limits. Maybe even breaks it. We compile a program, linking it
- # against the deplibs as a proxy for the library. Then we can check
- # whether they linked in statically or dynamically with ldd.
- $opt_dry_run || $RM conftest.c
- cat > conftest.c <<EOF
- int main() { return 0; }
-EOF
- $opt_dry_run || $RM conftest
- if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
- ldd_output=`ldd conftest`
- for i in $deplibs; do
- case $i in
- -l*)
- func_stripname -l '' "$i"
- name=$func_stripname_result
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $i "*)
- newdeplibs="$newdeplibs $i"
- i=""
- ;;
- esac
- fi
- if test -n "$i" ; then
- libname=`eval "\\$ECHO \"$libname_spec\""`
- deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
- set dummy $deplib_matches; shift
- deplib_match=$1
- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
- newdeplibs="$newdeplibs $i"
- else
- droppeddeps=yes
- $ECHO
- $ECHO "*** Warning: dynamic linker does not accept needed library $i."
- $ECHO "*** I have the capability to make that library automatically link in when"
- $ECHO "*** you link to this library. But I can only do this if you have a"
- $ECHO "*** shared version of the library, which I believe you do not have"
- $ECHO "*** because a test_compile did reveal that the linker did not use it for"
- $ECHO "*** its dynamic dependency list that programs get resolved with at runtime."
- fi
- fi
- ;;
- *)
- newdeplibs="$newdeplibs $i"
- ;;
- esac
- done
- else
- # Error occurred in the first compile. Let's try to salvage
- # the situation: Compile a separate program for each library.
- for i in $deplibs; do
- case $i in
- -l*)
- func_stripname -l '' "$i"
- name=$func_stripname_result
- $opt_dry_run || $RM conftest
- if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
- ldd_output=`ldd conftest`
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $i "*)
- newdeplibs="$newdeplibs $i"
- i=""
- ;;
- esac
- fi
- if test -n "$i" ; then
- libname=`eval "\\$ECHO \"$libname_spec\""`
- deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
- set dummy $deplib_matches; shift
- deplib_match=$1
- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
- newdeplibs="$newdeplibs $i"
- else
- droppeddeps=yes
- $ECHO
- $ECHO "*** Warning: dynamic linker does not accept needed library $i."
- $ECHO "*** I have the capability to make that library automatically link in when"
- $ECHO "*** you link to this library. But I can only do this if you have a"
- $ECHO "*** shared version of the library, which you do not appear to have"
- $ECHO "*** because a test_compile did reveal that the linker did not use this one"
- $ECHO "*** as a dynamic dependency that programs can get resolved with at runtime."
- fi
- fi
- else
- droppeddeps=yes
- $ECHO
- $ECHO "*** Warning! Library $i is needed by this library but I was not able to"
- $ECHO "*** make it link in! You will probably need to install it or some"
- $ECHO "*** library that it depends on before this library will be fully"
- $ECHO "*** functional. Installing it before continuing would be even better."
- fi
- ;;
- *)
- newdeplibs="$newdeplibs $i"
- ;;
- esac
- done
- fi
- ;;
- file_magic*)
- set dummy $deplibs_check_method; shift
- file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
- for a_deplib in $deplibs; do
- case $a_deplib in
- -l*)
- func_stripname -l '' "$a_deplib"
- name=$func_stripname_result
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $a_deplib "*)
- newdeplibs="$newdeplibs $a_deplib"
- a_deplib=""
- ;;
- esac
- fi
- if test -n "$a_deplib" ; then
- libname=`eval "\\$ECHO \"$libname_spec\""`
- for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
- potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
- for potent_lib in $potential_libs; do
- # Follow soft links.
- if ls -lLd "$potent_lib" 2>/dev/null |
- $GREP " -> " >/dev/null; then
- continue
- fi
- # The statement above tries to avoid entering an
- # endless loop below, in case of cyclic links.
- # We might still enter an endless loop, since a link
- # loop can be closed while we follow links,
- # but so what?
- potlib="$potent_lib"
- while test -h "$potlib" 2>/dev/null; do
- potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
- case $potliblink in
- [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
- *) potlib=`$ECHO "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";;
- esac
- done
- if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
- $SED -e 10q |
- $EGREP "$file_magic_regex" > /dev/null; then
- newdeplibs="$newdeplibs $a_deplib"
- a_deplib=""
- break 2
- fi
- done
- done
- fi
- if test -n "$a_deplib" ; then
- droppeddeps=yes
- $ECHO
- $ECHO "*** Warning: linker path does not have real file for library $a_deplib."
- $ECHO "*** I have the capability to make that library automatically link in when"
- $ECHO "*** you link to this library. But I can only do this if you have a"
- $ECHO "*** shared version of the library, which you do not appear to have"
- $ECHO "*** because I did check the linker path looking for a file starting"
- if test -z "$potlib" ; then
- $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
- else
- $ECHO "*** with $libname and none of the candidates passed a file format test"
- $ECHO "*** using a file magic. Last file checked: $potlib"
- fi
- fi
- ;;
- *)
- # Add a -L argument.
- newdeplibs="$newdeplibs $a_deplib"
- ;;
- esac
- done # Gone through all deplibs.
- ;;
- match_pattern*)
- set dummy $deplibs_check_method; shift
- match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
- for a_deplib in $deplibs; do
- case $a_deplib in
- -l*)
- func_stripname -l '' "$a_deplib"
- name=$func_stripname_result
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $a_deplib "*)
- newdeplibs="$newdeplibs $a_deplib"
- a_deplib=""
- ;;
- esac
- fi
- if test -n "$a_deplib" ; then
- libname=`eval "\\$ECHO \"$libname_spec\""`
- for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
- potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
- for potent_lib in $potential_libs; do
- potlib="$potent_lib" # see symlink-check above in file_magic test
- if eval "\$ECHO \"X$potent_lib\"" 2>/dev/null | $Xsed -e 10q | \
- $EGREP "$match_pattern_regex" > /dev/null; then
- newdeplibs="$newdeplibs $a_deplib"
- a_deplib=""
- break 2
- fi
- done
- done
- fi
- if test -n "$a_deplib" ; then
- droppeddeps=yes
- $ECHO
- $ECHO "*** Warning: linker path does not have real file for library $a_deplib."
- $ECHO "*** I have the capability to make that library automatically link in when"
- $ECHO "*** you link to this library. But I can only do this if you have a"
- $ECHO "*** shared version of the library, which you do not appear to have"
- $ECHO "*** because I did check the linker path looking for a file starting"
- if test -z "$potlib" ; then
- $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
- else
- $ECHO "*** with $libname and none of the candidates passed a file format test"
- $ECHO "*** using a regex pattern. Last file checked: $potlib"
- fi
- fi
- ;;
- *)
- # Add a -L argument.
- newdeplibs="$newdeplibs $a_deplib"
- ;;
- esac
- done # Gone through all deplibs.
- ;;
- none | unknown | *)
- newdeplibs=""
- tmp_deplibs=`$ECHO "X $deplibs" | $Xsed \
- -e 's/ -lc$//' -e 's/ -[LR][^ ]*//g'`
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- for i in $predeps $postdeps ; do
- # can't use Xsed below, because $i might contain '/'
- tmp_deplibs=`$ECHO "X $tmp_deplibs" | $Xsed -e "s,$i,,"`
- done
- fi
- if $ECHO "X $tmp_deplibs" | $Xsed -e 's/[ ]//g' |
- $GREP . >/dev/null; then
- $ECHO
- if test "X$deplibs_check_method" = "Xnone"; then
-	    $ECHO "*** Warning: inter-library dependencies are not supported on this platform."
- else
- $ECHO "*** Warning: inter-library dependencies are not known to be supported."
- fi
- $ECHO "*** All declared inter-library dependencies are being dropped."
- droppeddeps=yes
- fi
- ;;
- esac
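A hedged sketch of the file_magic branch of the check that just closed: probe candidate files on a search path and accept one only when `file' output matches a magic regex. The library name, directories, and regex below are illustrative assumptions, not values taken from this script.

#!/bin/sh
libname=libz
file_magic_regex='ELF.*shared object'
found=
for dir in /usr/lib /usr/local/lib; do
  for cand in "$dir/$libname".so*; do
    [ -e "$cand" ] || continue          # the glob did not match anything
    if file "$cand" 2>/dev/null | sed -e 10q |
       grep -E "$file_magic_regex" >/dev/null; then
      found=$cand
      break 2
    fi
  done
done
printf '%s\n' "${found:-no shared $libname found}"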
- versuffix=$versuffix_save
- major=$major_save
- release=$release_save
- libname=$libname_save
- name=$name_save
-
- case $host in
- *-*-rhapsody* | *-*-darwin1.[012])
- # On Rhapsody replace the C library with the System framework
- newdeplibs=`$ECHO "X $newdeplibs" | $Xsed -e 's/ -lc / System.ltframework /'`
- ;;
- esac
-
- if test "$droppeddeps" = yes; then
- if test "$module" = yes; then
- $ECHO
- $ECHO "*** Warning: libtool could not satisfy all declared inter-library"
- $ECHO "*** dependencies of module $libname. Therefore, libtool will create"
- $ECHO "*** a static module, that should work as long as the dlopening"
- $ECHO "*** application is linked with the -dlopen flag."
- if test -z "$global_symbol_pipe"; then
- $ECHO
- $ECHO "*** However, this would only work if libtool was able to extract symbol"
- $ECHO "*** lists from a program, using \`nm' or equivalent, but libtool could"
- $ECHO "*** not find such a program. So, this module is probably useless."
- $ECHO "*** \`nm' from GNU binutils and a full rebuild may help."
- fi
- if test "$build_old_libs" = no; then
- oldlibs="$output_objdir/$libname.$libext"
- build_libtool_libs=module
- build_old_libs=yes
- else
- build_libtool_libs=no
- fi
- else
- $ECHO "*** The inter-library dependencies that have been dropped here will be"
- $ECHO "*** automatically added whenever a program is linked with this library"
- $ECHO "*** or is declared to -dlopen it."
-
- if test "$allow_undefined" = no; then
- $ECHO
- $ECHO "*** Since this library must not contain undefined symbols,"
- $ECHO "*** because either the platform does not support them or"
- $ECHO "*** it was explicitly requested with -no-undefined,"
- $ECHO "*** libtool will only create a static version of it."
- if test "$build_old_libs" = no; then
- oldlibs="$output_objdir/$libname.$libext"
- build_libtool_libs=module
- build_old_libs=yes
- else
- build_libtool_libs=no
- fi
- fi
- fi
- fi
- # Done checking deplibs!
- deplibs=$newdeplibs
- fi
- # Time to change all our "foo.ltframework" stuff back to "-framework foo"
- case $host in
- *-*-darwin*)
- newdeplibs=`$ECHO "X $newdeplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- new_inherited_linker_flags=`$ECHO "X $new_inherited_linker_flags" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- deplibs=`$ECHO "X $deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- ;;
- esac
-
- # move library search paths that coincide with paths to not yet
- # installed libraries to the beginning of the library search list
- new_libs=
- for path in $notinst_path; do
- case " $new_libs " in
- *" -L$path/$objdir "*) ;;
- *)
- case " $deplibs " in
- *" -L$path/$objdir "*)
- new_libs="$new_libs -L$path/$objdir" ;;
- esac
- ;;
- esac
- done
- for deplib in $deplibs; do
- case $deplib in
- -L*)
- case " $new_libs " in
- *" $deplib "*) ;;
- *) new_libs="$new_libs $deplib" ;;
- esac
- ;;
- *) new_libs="$new_libs $deplib" ;;
- esac
- done
- deplibs="$new_libs"
-
- # All the library-specific variables (install_libdir is set above).
- library_names=
- old_library=
- dlname=
-
- # Test again, we may have decided not to build it any more
- if test "$build_libtool_libs" = yes; then
- if test "$hardcode_into_libs" = yes; then
- # Hardcode the library paths
- hardcode_libdirs=
- dep_rpath=
- rpath="$finalize_rpath"
- test "$mode" != relink && rpath="$compile_rpath$rpath"
- for libdir in $rpath; do
- if test -n "$hardcode_libdir_flag_spec"; then
- if test -n "$hardcode_libdir_separator"; then
- if test -z "$hardcode_libdirs"; then
- hardcode_libdirs="$libdir"
- else
- # Just accumulate the unique libdirs.
- case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
- *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
- ;;
- *)
- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
- ;;
- esac
- fi
- else
- eval flag=\"$hardcode_libdir_flag_spec\"
- dep_rpath="$dep_rpath $flag"
- fi
- elif test -n "$runpath_var"; then
- case "$perm_rpath " in
- *" $libdir "*) ;;
- *) perm_rpath="$perm_rpath $libdir" ;;
- esac
- fi
- done
- # Substitute the hardcoded libdirs into the rpath.
- if test -n "$hardcode_libdir_separator" &&
- test -n "$hardcode_libdirs"; then
- libdir="$hardcode_libdirs"
- if test -n "$hardcode_libdir_flag_spec_ld"; then
- eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
- else
- eval dep_rpath=\"$hardcode_libdir_flag_spec\"
- fi
- fi
- if test -n "$runpath_var" && test -n "$perm_rpath"; then
- # We should set the runpath_var.
- rpath=
- for dir in $perm_rpath; do
- rpath="$rpath$dir:"
- done
- eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
- fi
- test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
- fi
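The rpath hardcoding above accumulates unique libdirs joined by $hardcode_libdir_separator before expanding the flag spec; a reduced sketch with an assumed ':' separator and made-up directories:

#!/bin/sh
sep=':'
hardcode_libdirs=
for libdir in /usr/lib /opt/foo/lib /usr/lib; do
  case "$sep$hardcode_libdirs$sep" in
    *"$sep$libdir$sep"*) ;;                              # already recorded
    "$sep$sep")          hardcode_libdirs=$libdir ;;     # first entry
    *) hardcode_libdirs="$hardcode_libdirs$sep$libdir" ;;
  esac
done
printf '%s\n' "-Wl,-rpath,$hardcode_libdirs"             # -Wl,-rpath,/usr/lib:/opt/foo/lib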
-
- shlibpath="$finalize_shlibpath"
- test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
- if test -n "$shlibpath"; then
- eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
- fi
-
- # Get the real and link names of the library.
- eval shared_ext=\"$shrext_cmds\"
- eval library_names=\"$library_names_spec\"
- set dummy $library_names
- shift
- realname="$1"
- shift
-
- if test -n "$soname_spec"; then
- eval soname=\"$soname_spec\"
- else
- soname="$realname"
- fi
- if test -z "$dlname"; then
- dlname=$soname
- fi
-
- lib="$output_objdir/$realname"
- linknames=
- for link
- do
- linknames="$linknames $link"
- done
-
- # Use standard objects if they are pic
- test -z "$pic_flag" && libobjs=`$ECHO "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
- test "X$libobjs" = "X " && libobjs=
-
- delfiles=
- if test -n "$export_symbols" && test -n "$include_expsyms"; then
- $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
- export_symbols="$output_objdir/$libname.uexp"
- delfiles="$delfiles $export_symbols"
- fi
-
- orig_export_symbols=
- case $host_os in
- cygwin* | mingw* | cegcc*)
- if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
- # exporting using user supplied symfile
- if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
- # and it's NOT already a .def file. Must figure out
- # which of the given symbols are data symbols and tag
- # them as such. So, trigger use of export_symbols_cmds.
- # export_symbols gets reassigned inside the "prepare
- # the list of exported symbols" if statement, so the
- # include_expsyms logic still works.
- orig_export_symbols="$export_symbols"
- export_symbols=
- always_export_symbols=yes
- fi
- fi
- ;;
- esac
-
- # Prepare the list of exported symbols
- if test -z "$export_symbols"; then
- if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
- func_verbose "generating symbol list for \`$libname.la'"
- export_symbols="$output_objdir/$libname.exp"
- $opt_dry_run || $RM $export_symbols
- cmds=$export_symbols_cmds
- save_ifs="$IFS"; IFS='~'
- for cmd in $cmds; do
- IFS="$save_ifs"
- eval cmd=\"$cmd\"
- func_len " $cmd"
- len=$func_len_result
- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
- func_show_eval "$cmd" 'exit $?'
- skipped_export=false
- else
- # The command line is too long to execute in one step.
- func_verbose "using reloadable object file for export list..."
- skipped_export=:
- # Break out early, otherwise skipped_export may be
- # set to false by a later but shorter cmd.
- break
- fi
- done
- IFS="$save_ifs"
- if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
- func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
- func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
- fi
- fi
- fi
-
- if test -n "$export_symbols" && test -n "$include_expsyms"; then
- tmp_export_symbols="$export_symbols"
- test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
- $opt_dry_run || eval '$ECHO "X$include_expsyms" | $Xsed | $SP2NL >> "$tmp_export_symbols"'
- fi
-
- if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
-	  # The given export_symbols file has to be filtered, so filter it.
- func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
- # FIXME: $output_objdir/$libname.filter potentially contains lots of
- # 's' commands which not all seds can handle. GNU sed should be fine
- # though. Also, the filter scales superlinearly with the number of
- # global variables. join(1) would be nice here, but unfortunately
- # isn't a blessed tool.
- $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
- export_symbols=$output_objdir/$libname.def
- $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
- fi
-
- tmp_deplibs=
- for test_deplib in $deplibs; do
- case " $convenience " in
- *" $test_deplib "*) ;;
- *)
- tmp_deplibs="$tmp_deplibs $test_deplib"
- ;;
- esac
- done
- deplibs="$tmp_deplibs"
-
- if test -n "$convenience"; then
- if test -n "$whole_archive_flag_spec" &&
- test "$compiler_needs_object" = yes &&
- test -z "$libobjs"; then
- # extract the archives, so we have objects to list.
- # TODO: could optimize this to just extract one archive.
- whole_archive_flag_spec=
- fi
- if test -n "$whole_archive_flag_spec"; then
- save_libobjs=$libobjs
- eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
- test "X$libobjs" = "X " && libobjs=
- else
- gentop="$output_objdir/${outputname}x"
- generated="$generated $gentop"
-
- func_extract_archives $gentop $convenience
- libobjs="$libobjs $func_extract_archives_result"
- test "X$libobjs" = "X " && libobjs=
- fi
- fi
-
- if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
- eval flag=\"$thread_safe_flag_spec\"
- linker_flags="$linker_flags $flag"
- fi
-
- # Make a backup of the uninstalled library when relinking
- if test "$mode" = relink; then
- $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
- fi
-
- # Do each of the archive commands.
- if test "$module" = yes && test -n "$module_cmds" ; then
- if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
- eval test_cmds=\"$module_expsym_cmds\"
- cmds=$module_expsym_cmds
- else
- eval test_cmds=\"$module_cmds\"
- cmds=$module_cmds
- fi
- else
- if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
- eval test_cmds=\"$archive_expsym_cmds\"
- cmds=$archive_expsym_cmds
- else
- eval test_cmds=\"$archive_cmds\"
- cmds=$archive_cmds
- fi
- fi
-
- if test "X$skipped_export" != "X:" &&
- func_len " $test_cmds" &&
- len=$func_len_result &&
- test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
- :
- else
- # The command line is too long to link in one step, link piecewise
- # or, if using GNU ld and skipped_export is not :, use a linker
- # script.
-
- # Save the value of $output and $libobjs because we want to
- # use them later. If we have whole_archive_flag_spec, we
- # want to use save_libobjs as it was before
- # whole_archive_flag_spec was expanded, because we can't
- # assume the linker understands whole_archive_flag_spec.
- # This may have to be revisited, in case too many
- # convenience libraries get linked in and end up exceeding
- # the spec.
- if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
- save_libobjs=$libobjs
- fi
- save_output=$output
- output_la=`$ECHO "X$output" | $Xsed -e "$basename"`
-
- # Clear the reloadable object creation command queue and
- # initialize k to one.
- test_cmds=
- concat_cmds=
- objlist=
- last_robj=
- k=1
-
- if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
- output=${output_objdir}/${output_la}.lnkscript
- func_verbose "creating GNU ld script: $output"
- $ECHO 'INPUT (' > $output
- for obj in $save_libobjs
- do
- $ECHO "$obj" >> $output
- done
- $ECHO ')' >> $output
- delfiles="$delfiles $output"
- elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
- output=${output_objdir}/${output_la}.lnk
- func_verbose "creating linker input file list: $output"
- : > $output
- set x $save_libobjs
- shift
- firstobj=
- if test "$compiler_needs_object" = yes; then
- firstobj="$1 "
- shift
- fi
- for obj
- do
- $ECHO "$obj" >> $output
- done
- delfiles="$delfiles $output"
- output=$firstobj\"$file_list_spec$output\"
- else
- if test -n "$save_libobjs"; then
- func_verbose "creating reloadable object files..."
- output=$output_objdir/$output_la-${k}.$objext
- eval test_cmds=\"$reload_cmds\"
- func_len " $test_cmds"
- len0=$func_len_result
- len=$len0
-
- # Loop over the list of objects to be linked.
- for obj in $save_libobjs
- do
- func_len " $obj"
- func_arith $len + $func_len_result
- len=$func_arith_result
- if test "X$objlist" = X ||
- test "$len" -lt "$max_cmd_len"; then
- func_append objlist " $obj"
- else
- # The command $test_cmds is almost too long, add a
- # command to the queue.
- if test "$k" -eq 1 ; then
- # The first file doesn't have a previous command to add.
- eval concat_cmds=\"$reload_cmds $objlist $last_robj\"
- else
- # All subsequent reloadable object files will link in
- # the last one created.
- eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj~\$RM $last_robj\"
- fi
- last_robj=$output_objdir/$output_la-${k}.$objext
- func_arith $k + 1
- k=$func_arith_result
- output=$output_objdir/$output_la-${k}.$objext
- objlist=$obj
- func_len " $last_robj"
- func_arith $len0 + $func_len_result
- len=$func_arith_result
- fi
- done
- # Handle the remaining objects by creating one last
- # reloadable object file. All subsequent reloadable object
- # files will link in the last one created.
- test -z "$concat_cmds" || concat_cmds=$concat_cmds~
- eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\"
- if test -n "$last_robj"; then
- eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
- fi
- delfiles="$delfiles $output"
-
- else
- output=
- fi
-
- if ${skipped_export-false}; then
- func_verbose "generating symbol list for \`$libname.la'"
- export_symbols="$output_objdir/$libname.exp"
- $opt_dry_run || $RM $export_symbols
- libobjs=$output
- # Append the command to create the export file.
- test -z "$concat_cmds" || concat_cmds=$concat_cmds~
- eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
- if test -n "$last_robj"; then
- eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
- fi
- fi
-
- test -n "$save_libobjs" &&
- func_verbose "creating a temporary reloadable object file: $output"
-
- # Loop through the commands generated above and execute them.
- save_ifs="$IFS"; IFS='~'
- for cmd in $concat_cmds; do
- IFS="$save_ifs"
- $opt_silent || {
- func_quote_for_expand "$cmd"
- eval "func_echo $func_quote_for_expand_result"
- }
- $opt_dry_run || eval "$cmd" || {
- lt_exit=$?
-
- # Restore the uninstalled library and exit
- if test "$mode" = relink; then
- ( cd "$output_objdir" && \
- $RM "${realname}T" && \
- $MV "${realname}U" "$realname" )
- fi
-
- exit $lt_exit
- }
- done
- IFS="$save_ifs"
-
- if test -n "$export_symbols_regex" && ${skipped_export-false}; then
- func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
- func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
- fi
- fi
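A small sketch of the GNU ld fallback handled above: when the object list would overflow the command line, the object names are written into an INPUT(...) linker script and the linker is handed that single file instead. The file names here are invented.

#!/bin/sh
output=long-objs.lnkscript
save_libobjs='a.o b.o c.o'          # stand-ins for a very long object list
{
  echo 'INPUT ('
  for obj in $save_libobjs; do
    echo "$obj"
  done
  echo ')'
} > "$output"
cat "$output"
# A GNU ld link could then name the script in place of the objects,
# e.g. (not run here):  cc -shared -o libfoo.so long-objs.lnkscript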
-
- if ${skipped_export-false}; then
- if test -n "$export_symbols" && test -n "$include_expsyms"; then
- tmp_export_symbols="$export_symbols"
- test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
- $opt_dry_run || eval '$ECHO "X$include_expsyms" | $Xsed | $SP2NL >> "$tmp_export_symbols"'
- fi
-
- if test -n "$orig_export_symbols"; then
-	      # The given export_symbols file has to be filtered, so filter it.
- func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
- # FIXME: $output_objdir/$libname.filter potentially contains lots of
- # 's' commands which not all seds can handle. GNU sed should be fine
- # though. Also, the filter scales superlinearly with the number of
- # global variables. join(1) would be nice here, but unfortunately
- # isn't a blessed tool.
- $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
- export_symbols=$output_objdir/$libname.def
- $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
- fi
- fi
-
- libobjs=$output
- # Restore the value of output.
- output=$save_output
-
- if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
- eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
- test "X$libobjs" = "X " && libobjs=
- fi
- # Expand the library linking commands again to reset the
- # value of $libobjs for piecewise linking.
-
- # Do each of the archive commands.
- if test "$module" = yes && test -n "$module_cmds" ; then
- if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
- cmds=$module_expsym_cmds
- else
- cmds=$module_cmds
- fi
- else
- if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
- cmds=$archive_expsym_cmds
- else
- cmds=$archive_cmds
- fi
- fi
- fi
-
- if test -n "$delfiles"; then
- # Append the command to remove temporary files to $cmds.
- eval cmds=\"\$cmds~\$RM $delfiles\"
- fi
-
- # Add any objects from preloaded convenience libraries
- if test -n "$dlprefiles"; then
- gentop="$output_objdir/${outputname}x"
- generated="$generated $gentop"
-
- func_extract_archives $gentop $dlprefiles
- libobjs="$libobjs $func_extract_archives_result"
- test "X$libobjs" = "X " && libobjs=
- fi
-
- save_ifs="$IFS"; IFS='~'
- for cmd in $cmds; do
- IFS="$save_ifs"
- eval cmd=\"$cmd\"
- $opt_silent || {
- func_quote_for_expand "$cmd"
- eval "func_echo $func_quote_for_expand_result"
- }
- $opt_dry_run || eval "$cmd" || {
- lt_exit=$?
-
- # Restore the uninstalled library and exit
- if test "$mode" = relink; then
- ( cd "$output_objdir" && \
- $RM "${realname}T" && \
- $MV "${realname}U" "$realname" )
- fi
-
- exit $lt_exit
- }
- done
- IFS="$save_ifs"
-
- # Restore the uninstalled library and exit
- if test "$mode" = relink; then
- $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
-
- if test -n "$convenience"; then
- if test -z "$whole_archive_flag_spec"; then
- func_show_eval '${RM}r "$gentop"'
- fi
- fi
-
- exit $EXIT_SUCCESS
- fi
-
- # Create links to the real library.
- for linkname in $linknames; do
- if test "$realname" != "$linkname"; then
- func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
- fi
- done
-
- # If -module or -export-dynamic was specified, set the dlname.
- if test "$module" = yes || test "$export_dynamic" = yes; then
- # On all known operating systems, these are identical.
- dlname="$soname"
- fi
- fi
- ;;
-
- obj)
- if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
- func_warning "\`-dlopen' is ignored for objects"
- fi
-
- case " $deplibs" in
- *\ -l* | *\ -L*)
- func_warning "\`-l' and \`-L' are ignored for objects" ;;
- esac
-
- test -n "$rpath" && \
- func_warning "\`-rpath' is ignored for objects"
-
- test -n "$xrpath" && \
- func_warning "\`-R' is ignored for objects"
-
- test -n "$vinfo" && \
- func_warning "\`-version-info' is ignored for objects"
-
- test -n "$release" && \
- func_warning "\`-release' is ignored for objects"
-
- case $output in
- *.lo)
- test -n "$objs$old_deplibs" && \
- func_fatal_error "cannot build library object \`$output' from non-libtool objects"
-
- libobj=$output
- func_lo2o "$libobj"
- obj=$func_lo2o_result
- ;;
- *)
- libobj=
- obj="$output"
- ;;
- esac
-
- # Delete the old objects.
- $opt_dry_run || $RM $obj $libobj
-
- # Objects from convenience libraries. This assumes
- # single-version convenience libraries. Whenever we create
-      # different ones for PIC/non-PIC, then we'll have to duplicate
- # the extraction.
- reload_conv_objs=
- gentop=
- # reload_cmds runs $LD directly, so let us get rid of
- # -Wl from whole_archive_flag_spec and hope we can get by with
-      # turning commas into spaces.
- wl=
-
- if test -n "$convenience"; then
- if test -n "$whole_archive_flag_spec"; then
- eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
- reload_conv_objs=$reload_objs\ `$ECHO "X$tmp_whole_archive_flags" | $Xsed -e 's|,| |g'`
- else
- gentop="$output_objdir/${obj}x"
- generated="$generated $gentop"
-
- func_extract_archives $gentop $convenience
- reload_conv_objs="$reload_objs $func_extract_archives_result"
- fi
- fi
-
- # Create the old-style object.
- reload_objs="$objs$old_deplibs "`$ECHO "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
-
- output="$obj"
- func_execute_cmds "$reload_cmds" 'exit $?'
-
- # Exit if we aren't doing a library object file.
- if test -z "$libobj"; then
- if test -n "$gentop"; then
- func_show_eval '${RM}r "$gentop"'
- fi
-
- exit $EXIT_SUCCESS
- fi
-
- if test "$build_libtool_libs" != yes; then
- if test -n "$gentop"; then
- func_show_eval '${RM}r "$gentop"'
- fi
-
- # Create an invalid libtool object if no PIC, so that we don't
- # accidentally link it into a program.
- # $show "echo timestamp > $libobj"
- # $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
- exit $EXIT_SUCCESS
- fi
-
- if test -n "$pic_flag" || test "$pic_mode" != default; then
- # Only do commands if we really have different PIC objects.
- reload_objs="$libobjs $reload_conv_objs"
- output="$libobj"
- func_execute_cmds "$reload_cmds" 'exit $?'
- fi
-
- if test -n "$gentop"; then
- func_show_eval '${RM}r "$gentop"'
- fi
-
- exit $EXIT_SUCCESS
- ;;
-
- prog)
- case $host in
- *cygwin*) func_stripname '' '.exe' "$output"
- output=$func_stripname_result.exe;;
- esac
- test -n "$vinfo" && \
- func_warning "\`-version-info' is ignored for programs"
-
- test -n "$release" && \
- func_warning "\`-release' is ignored for programs"
-
- test "$preload" = yes \
- && test "$dlopen_support" = unknown \
- && test "$dlopen_self" = unknown \
- && test "$dlopen_self_static" = unknown && \
- func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
-
- case $host in
- *-*-rhapsody* | *-*-darwin1.[012])
-	# On Rhapsody replace the C library with the System framework
- compile_deplibs=`$ECHO "X $compile_deplibs" | $Xsed -e 's/ -lc / System.ltframework /'`
- finalize_deplibs=`$ECHO "X $finalize_deplibs" | $Xsed -e 's/ -lc / System.ltframework /'`
- ;;
- esac
-
- case $host in
- *-*-darwin*)
-	# Don't allow lazy linking; it breaks C++ global constructors,
-	# but it is supposedly fixed on 10.4 or later (yay!).
- if test "$tagname" = CXX ; then
- case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
- 10.[0123])
- compile_command="$compile_command ${wl}-bind_at_load"
- finalize_command="$finalize_command ${wl}-bind_at_load"
- ;;
- esac
- fi
- # Time to change all our "foo.ltframework" stuff back to "-framework foo"
- compile_deplibs=`$ECHO "X $compile_deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- finalize_deplibs=`$ECHO "X $finalize_deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'`
- ;;
- esac
-
-
- # move library search paths that coincide with paths to not yet
- # installed libraries to the beginning of the library search list
- new_libs=
- for path in $notinst_path; do
- case " $new_libs " in
- *" -L$path/$objdir "*) ;;
- *)
- case " $compile_deplibs " in
- *" -L$path/$objdir "*)
- new_libs="$new_libs -L$path/$objdir" ;;
- esac
- ;;
- esac
- done
- for deplib in $compile_deplibs; do
- case $deplib in
- -L*)
- case " $new_libs " in
- *" $deplib "*) ;;
- *) new_libs="$new_libs $deplib" ;;
- esac
- ;;
- *) new_libs="$new_libs $deplib" ;;
- esac
- done
- compile_deplibs="$new_libs"
-
-
- compile_command="$compile_command $compile_deplibs"
- finalize_command="$finalize_command $finalize_deplibs"
-
- if test -n "$rpath$xrpath"; then
- # If the user specified any rpath flags, then add them.
- for libdir in $rpath $xrpath; do
- # This is the magic to use -rpath.
- case "$finalize_rpath " in
- *" $libdir "*) ;;
- *) finalize_rpath="$finalize_rpath $libdir" ;;
- esac
- done
- fi
-
- # Now hardcode the library paths
- rpath=
- hardcode_libdirs=
- for libdir in $compile_rpath $finalize_rpath; do
- if test -n "$hardcode_libdir_flag_spec"; then
- if test -n "$hardcode_libdir_separator"; then
- if test -z "$hardcode_libdirs"; then
- hardcode_libdirs="$libdir"
- else
- # Just accumulate the unique libdirs.
- case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
- *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
- ;;
- *)
- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
- ;;
- esac
- fi
- else
- eval flag=\"$hardcode_libdir_flag_spec\"
- rpath="$rpath $flag"
- fi
- elif test -n "$runpath_var"; then
- case "$perm_rpath " in
- *" $libdir "*) ;;
- *) perm_rpath="$perm_rpath $libdir" ;;
- esac
- fi
- case $host in
- *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
- testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
- case :$dllsearchpath: in
- *":$libdir:"*) ;;
- ::) dllsearchpath=$libdir;;
- *) dllsearchpath="$dllsearchpath:$libdir";;
- esac
- case :$dllsearchpath: in
- *":$testbindir:"*) ;;
- ::) dllsearchpath=$testbindir;;
- *) dllsearchpath="$dllsearchpath:$testbindir";;
- esac
- ;;
- esac
- done
- # Substitute the hardcoded libdirs into the rpath.
- if test -n "$hardcode_libdir_separator" &&
- test -n "$hardcode_libdirs"; then
- libdir="$hardcode_libdirs"
- eval rpath=\" $hardcode_libdir_flag_spec\"
- fi
- compile_rpath="$rpath"
-
- rpath=
- hardcode_libdirs=
- for libdir in $finalize_rpath; do
- if test -n "$hardcode_libdir_flag_spec"; then
- if test -n "$hardcode_libdir_separator"; then
- if test -z "$hardcode_libdirs"; then
- hardcode_libdirs="$libdir"
- else
- # Just accumulate the unique libdirs.
- case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
- *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
- ;;
- *)
- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
- ;;
- esac
- fi
- else
- eval flag=\"$hardcode_libdir_flag_spec\"
- rpath="$rpath $flag"
- fi
- elif test -n "$runpath_var"; then
- case "$finalize_perm_rpath " in
- *" $libdir "*) ;;
- *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;;
- esac
- fi
- done
- # Substitute the hardcoded libdirs into the rpath.
- if test -n "$hardcode_libdir_separator" &&
- test -n "$hardcode_libdirs"; then
- libdir="$hardcode_libdirs"
- eval rpath=\" $hardcode_libdir_flag_spec\"
- fi
- finalize_rpath="$rpath"
-
- if test -n "$libobjs" && test "$build_old_libs" = yes; then
- # Transform all the library objects into standard objects.
- compile_command=`$ECHO "X$compile_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
- finalize_command=`$ECHO "X$finalize_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
- fi
-
- func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
-
- # template prelinking step
- if test -n "$prelink_cmds"; then
- func_execute_cmds "$prelink_cmds" 'exit $?'
- fi
-
- wrappers_required=yes
- case $host in
- *cygwin* | *mingw* )
- if test "$build_libtool_libs" != yes; then
- wrappers_required=no
- fi
- ;;
- *cegcc)
-        # Disable wrappers for cegcc; we are cross-compiling anyway.
- wrappers_required=no
- ;;
- *)
- if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
- wrappers_required=no
- fi
- ;;
- esac
- if test "$wrappers_required" = no; then
- # Replace the output file specification.
- compile_command=`$ECHO "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
- link_command="$compile_command$compile_rpath"
-
- # We have no uninstalled library dependencies, so finalize right now.
- exit_status=0
- func_show_eval "$link_command" 'exit_status=$?'
-
- # Delete the generated files.
- if test -f "$output_objdir/${outputname}S.${objext}"; then
- func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
- fi
-
- exit $exit_status
- fi
-
- if test -n "$compile_shlibpath$finalize_shlibpath"; then
- compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
- fi
- if test -n "$finalize_shlibpath"; then
- finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
- fi
-
- compile_var=
- finalize_var=
- if test -n "$runpath_var"; then
- if test -n "$perm_rpath"; then
- # We should set the runpath_var.
- rpath=
- for dir in $perm_rpath; do
- rpath="$rpath$dir:"
- done
- compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
- fi
- if test -n "$finalize_perm_rpath"; then
- # We should set the runpath_var.
- rpath=
- for dir in $finalize_perm_rpath; do
- rpath="$rpath$dir:"
- done
- finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
- fi
- fi
-
- if test "$no_install" = yes; then
- # We don't need to create a wrapper script.
- link_command="$compile_var$compile_command$compile_rpath"
- # Replace the output file specification.
- link_command=`$ECHO "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
- # Delete the old output file.
- $opt_dry_run || $RM $output
- # Link the executable and exit
- func_show_eval "$link_command" 'exit $?'
- exit $EXIT_SUCCESS
- fi
-
- if test "$hardcode_action" = relink; then
- # Fast installation is not supported
- link_command="$compile_var$compile_command$compile_rpath"
- relink_command="$finalize_var$finalize_command$finalize_rpath"
-
- func_warning "this platform does not like uninstalled shared libraries"
- func_warning "\`$output' will be relinked during installation"
- else
- if test "$fast_install" != no; then
- link_command="$finalize_var$compile_command$finalize_rpath"
- if test "$fast_install" = yes; then
- relink_command=`$ECHO "X$compile_var$compile_command$compile_rpath" | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g'`
- else
- # fast_install is set to needless
- relink_command=
- fi
- else
- link_command="$compile_var$compile_command$compile_rpath"
- relink_command="$finalize_var$finalize_command$finalize_rpath"
- fi
- fi
-
- # Replace the output file specification.
- link_command=`$ECHO "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
-
- # Delete the old output files.
- $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
-
- func_show_eval "$link_command" 'exit $?'
-
- # Now create the wrapper script.
- func_verbose "creating $output"
-
- # Quote the relink command for shipping.
- if test -n "$relink_command"; then
- # Preserve any variables that may affect compiler behavior
- for var in $variables_saved_for_relink; do
- if eval test -z \"\${$var+set}\"; then
- relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
- elif eval var_value=\$$var; test -z "$var_value"; then
- relink_command="$var=; export $var; $relink_command"
- else
- func_quote_for_eval "$var_value"
- relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
- fi
- done
- relink_command="(cd `pwd`; $relink_command)"
- relink_command=`$ECHO "X$relink_command" | $Xsed -e "$sed_quote_subst"`
- fi
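A reduced sketch of the variable-preserving quoting above, with an example variable list and command; simple single quotes stand in for func_quote_for_eval, so values containing single quotes would not survive this simplification.

#!/bin/sh
relink_command='cc -o prog prog.o -L/tmp/build/libs -lfoo'
for var in PATH LD_LIBRARY_PATH; do
  eval var_value=\$$var
  if [ -z "$var_value" ]; then
    relink_command="$var=; export $var; $relink_command"
  else
    # crude stand-in for func_quote_for_eval: wrap the value in single quotes
    relink_command="$var='$var_value'; export $var; $relink_command"
  fi
done
relink_command="(cd `pwd`; $relink_command)"
printf '%s\n' "$relink_command"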
-
- # Quote $ECHO for shipping.
- if test "X$ECHO" = "X$SHELL $progpath --fallback-echo"; then
- case $progpath in
- [\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $progpath --fallback-echo";;
- *) qecho="$SHELL `pwd`/$progpath --fallback-echo";;
- esac
- qecho=`$ECHO "X$qecho" | $Xsed -e "$sed_quote_subst"`
- else
- qecho=`$ECHO "X$ECHO" | $Xsed -e "$sed_quote_subst"`
- fi
-
- # Only actually do things if not in dry run mode.
- $opt_dry_run || {
- # win32 will think the script is a binary if it has
- # a .exe suffix, so we strip it off here.
- case $output in
- *.exe) func_stripname '' '.exe' "$output"
- output=$func_stripname_result ;;
- esac
- # test for cygwin because mv fails w/o .exe extensions
- case $host in
- *cygwin*)
- exeext=.exe
- func_stripname '' '.exe' "$outputname"
- outputname=$func_stripname_result ;;
- *) exeext= ;;
- esac
- case $host in
- *cygwin* | *mingw* )
- func_dirname_and_basename "$output" "" "."
- output_name=$func_basename_result
- output_path=$func_dirname_result
- cwrappersource="$output_path/$objdir/lt-$output_name.c"
- cwrapper="$output_path/$output_name.exe"
- $RM $cwrappersource $cwrapper
- trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
-
- func_emit_cwrapperexe_src > $cwrappersource
-
- # The wrapper executable is built using the $host compiler,
- # because it contains $host paths and files. If cross-
- # compiling, it, like the target executable, must be
- # executed on the $host or under an emulation environment.
- $opt_dry_run || {
- $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
- $STRIP $cwrapper
- }
-
- # Now, create the wrapper script for func_source use:
- func_ltwrapper_scriptname $cwrapper
- $RM $func_ltwrapper_scriptname_result
- trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
- $opt_dry_run || {
- # note: this script will not be executed, so do not chmod.
- if test "x$build" = "x$host" ; then
- $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
- else
- func_emit_wrapper no > $func_ltwrapper_scriptname_result
- fi
- }
- ;;
- * )
- $RM $output
- trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
-
- func_emit_wrapper no > $output
- chmod +x $output
- ;;
- esac
- }
- exit $EXIT_SUCCESS
- ;;
- esac
-
- # See if we need to build an old-fashioned archive.
- for oldlib in $oldlibs; do
-
- if test "$build_libtool_libs" = convenience; then
- oldobjs="$libobjs_save $symfileobj"
- addlibs="$convenience"
- build_libtool_libs=no
- else
- if test "$build_libtool_libs" = module; then
- oldobjs="$libobjs_save"
- build_libtool_libs=no
- else
- oldobjs="$old_deplibs $non_pic_objects"
- if test "$preload" = yes && test -f "$symfileobj"; then
- oldobjs="$oldobjs $symfileobj"
- fi
- fi
- addlibs="$old_convenience"
- fi
-
- if test -n "$addlibs"; then
- gentop="$output_objdir/${outputname}x"
- generated="$generated $gentop"
-
- func_extract_archives $gentop $addlibs
- oldobjs="$oldobjs $func_extract_archives_result"
- fi
-
- # Do each command in the archive commands.
- if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
- cmds=$old_archive_from_new_cmds
- else
-
- # Add any objects from preloaded convenience libraries
- if test -n "$dlprefiles"; then
- gentop="$output_objdir/${outputname}x"
- generated="$generated $gentop"
-
- func_extract_archives $gentop $dlprefiles
- oldobjs="$oldobjs $func_extract_archives_result"
- fi
-
- # POSIX demands no paths to be encoded in archives. We have
- # to avoid creating archives with duplicate basenames if we
- # might have to extract them afterwards, e.g., when creating a
- # static archive out of a convenience library, or when linking
- # the entirety of a libtool archive into another (currently
- # not supported by libtool).
- if (for obj in $oldobjs
- do
- func_basename "$obj"
- $ECHO "$func_basename_result"
- done | sort | sort -uc >/dev/null 2>&1); then
- :
- else
- $ECHO "copying selected object files to avoid basename conflicts..."
- gentop="$output_objdir/${outputname}x"
- generated="$generated $gentop"
- func_mkdir_p "$gentop"
- save_oldobjs=$oldobjs
- oldobjs=
- counter=1
- for obj in $save_oldobjs
- do
- func_basename "$obj"
- objbase="$func_basename_result"
- case " $oldobjs " in
- " ") oldobjs=$obj ;;
- *[\ /]"$objbase "*)
- while :; do
- # Make sure we don't pick an alternate name that also
- # overlaps.
- newobj=lt$counter-$objbase
- func_arith $counter + 1
- counter=$func_arith_result
- case " $oldobjs " in
- *[\ /]"$newobj "*) ;;
- *) if test ! -f "$gentop/$newobj"; then break; fi ;;
- esac
- done
- func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
- oldobjs="$oldobjs $gentop/$newobj"
- ;;
- *) oldobjs="$oldobjs $obj" ;;
- esac
- done
- fi
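A standalone sketch of the duplicate-basename test above: `sort | sort -uc' succeeds only when every basename is unique, since POSIX archives store basenames without paths. The object paths below are made up.

#!/bin/sh
oldobjs='dir1/a.o dir2/a.o dir1/b.o'
if (for obj in $oldobjs; do
      printf '%s\n' "${obj##*/}"        # basename only, as the archive stores it
    done | sort | sort -uc) >/dev/null 2>&1; then
  echo 'all basenames unique: safe to archive directly'
else
  echo 'duplicate basenames: some objects would have to be copied and renamed first'
fi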
- eval cmds=\"$old_archive_cmds\"
-
- func_len " $cmds"
- len=$func_len_result
- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
- cmds=$old_archive_cmds
- else
- # the command line is too long to link in one step, link in parts
- func_verbose "using piecewise archive linking..."
- save_RANLIB=$RANLIB
- RANLIB=:
- objlist=
- concat_cmds=
- save_oldobjs=$oldobjs
- oldobjs=
- # Is there a better way of finding the last object in the list?
- for obj in $save_oldobjs
- do
- last_oldobj=$obj
- done
- eval test_cmds=\"$old_archive_cmds\"
- func_len " $test_cmds"
- len0=$func_len_result
- len=$len0
- for obj in $save_oldobjs
- do
- func_len " $obj"
- func_arith $len + $func_len_result
- len=$func_arith_result
- func_append objlist " $obj"
- if test "$len" -lt "$max_cmd_len"; then
- :
- else
- # the above command should be used before it gets too long
- oldobjs=$objlist
- if test "$obj" = "$last_oldobj" ; then
- RANLIB=$save_RANLIB
- fi
- test -z "$concat_cmds" || concat_cmds=$concat_cmds~
- eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
- objlist=
- len=$len0
- fi
- done
- RANLIB=$save_RANLIB
- oldobjs=$objlist
- if test "X$oldobjs" = "X" ; then
- eval cmds=\"\$concat_cmds\"
- else
- eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
- fi
- fi
- fi
- func_execute_cmds "$cmds" 'exit $?'
- done
-
- test -n "$generated" && \
- func_show_eval "${RM}r$generated"
-
- # Now create the libtool archive.
- case $output in
- *.la)
- old_library=
- test "$build_old_libs" = yes && old_library="$libname.$libext"
- func_verbose "creating $output"
-
- # Preserve any variables that may affect compiler behavior
- for var in $variables_saved_for_relink; do
- if eval test -z \"\${$var+set}\"; then
- relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
- elif eval var_value=\$$var; test -z "$var_value"; then
- relink_command="$var=; export $var; $relink_command"
- else
- func_quote_for_eval "$var_value"
- relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
- fi
- done
- # Quote the link command for shipping.
- relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
- relink_command=`$ECHO "X$relink_command" | $Xsed -e "$sed_quote_subst"`
- if test "$hardcode_automatic" = yes ; then
- relink_command=
- fi
-
- # Only create the output if not a dry run.
- $opt_dry_run || {
- for installed in no yes; do
- if test "$installed" = yes; then
- if test -z "$install_libdir"; then
- break
- fi
- output="$output_objdir/$outputname"i
- # Replace all uninstalled libtool libraries with the installed ones
- newdependency_libs=
- for deplib in $dependency_libs; do
- case $deplib in
- *.la)
- func_basename "$deplib"
- name="$func_basename_result"
- eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
- test -z "$libdir" && \
- func_fatal_error "\`$deplib' is not a valid libtool archive"
- newdependency_libs="$newdependency_libs $libdir/$name"
- ;;
- *) newdependency_libs="$newdependency_libs $deplib" ;;
- esac
- done
- dependency_libs="$newdependency_libs"
- newdlfiles=
-
- for lib in $dlfiles; do
- case $lib in
- *.la)
- func_basename "$lib"
- name="$func_basename_result"
- eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
- test -z "$libdir" && \
- func_fatal_error "\`$lib' is not a valid libtool archive"
- newdlfiles="$newdlfiles $libdir/$name"
- ;;
- *) newdlfiles="$newdlfiles $lib" ;;
- esac
- done
- dlfiles="$newdlfiles"
- newdlprefiles=
- for lib in $dlprefiles; do
- case $lib in
- *.la)
- # Only pass preopened files to the pseudo-archive (for
- # eventual linking with the app. that links it) if we
- # didn't already link the preopened objects directly into
- # the library:
- func_basename "$lib"
- name="$func_basename_result"
- eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
- test -z "$libdir" && \
- func_fatal_error "\`$lib' is not a valid libtool archive"
- newdlprefiles="$newdlprefiles $libdir/$name"
- ;;
- esac
- done
- dlprefiles="$newdlprefiles"
- else
- newdlfiles=
- for lib in $dlfiles; do
- case $lib in
- [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
- *) abs=`pwd`"/$lib" ;;
- esac
- newdlfiles="$newdlfiles $abs"
- done
- dlfiles="$newdlfiles"
- newdlprefiles=
- for lib in $dlprefiles; do
- case $lib in
- [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
- *) abs=`pwd`"/$lib" ;;
- esac
- newdlprefiles="$newdlprefiles $abs"
- done
- dlprefiles="$newdlprefiles"
- fi
- $RM $output
- # place dlname in correct position for cygwin
- tdlname=$dlname
- case $host,$output,$installed,$module,$dlname in
- *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;;
- esac
- $ECHO > $output "\
-# $outputname - a libtool library file
-# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
-#
-# Please DO NOT delete this file!
-# It is necessary for linking the library.
-
-# The name that we can dlopen(3).
-dlname='$tdlname'
-
-# Names of this library.
-library_names='$library_names'
-
-# The name of the static archive.
-old_library='$old_library'
-
-# Linker flags that can not go in dependency_libs.
-inherited_linker_flags='$new_inherited_linker_flags'
-
-# Libraries that this one depends upon.
-dependency_libs='$dependency_libs'
-
-# Names of additional weak libraries provided by this library
-weak_library_names='$weak_libs'
-
-# Version information for $libname.
-current=$current
-age=$age
-revision=$revision
-
-# Is this an already installed library?
-installed=$installed
-
-# Should we warn about portability when linking against -modules?
-shouldnotlink=$module
-
-# Files to dlopen/dlpreopen
-dlopen='$dlfiles'
-dlpreopen='$dlprefiles'
-
-# Directory that this library needs to be installed in:
-libdir='$install_libdir'"
- if test "$installed" = no && test "$need_relink" = yes; then
- $ECHO >> $output "\
-relink_command=\"$relink_command\""
- fi
- done
- }
-
- # Do a symbolic link so that the libtool archive can be found in
- # LD_LIBRARY_PATH before the program is installed.
- func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
- ;;
- esac
- exit $EXIT_SUCCESS
-}
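For orientation, here is a minimal sketch of the kind of .la file the template above produces once the variables are substituted; the libfoo name, version triple and libdir are hypothetical, and the field list simply mirrors what func_mode_link writes:

    # libfoo.la - a libtool library file
    dlname='libfoo.so.1'
    library_names='libfoo.so.1.0.0 libfoo.so.1 libfoo.so'
    old_library='libfoo.a'
    inherited_linker_flags=''
    dependency_libs=' -lm'
    weak_library_names=''
    current=1
    age=0
    revision=0
    installed=yes
    shouldnotlink=no
    dlopen=''
    dlpreopen=''
    libdir='/usr/local/lib'

The uninstalled copy kept in the build tree carries installed=no and, when relinking is needed, an extra relink_command line, as the code above shows.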
-
-{ test "$mode" = link || test "$mode" = relink; } &&
- func_mode_link ${1+"$@"}
-
-
-# func_mode_uninstall arg...
-func_mode_uninstall ()
-{
- $opt_debug
- RM="$nonopt"
- files=
- rmforce=
- exit_status=0
-
- # This variable tells wrapper scripts just to set variables rather
- # than running their programs.
- libtool_install_magic="$magic"
-
- for arg
- do
- case $arg in
- -f) RM="$RM $arg"; rmforce=yes ;;
- -*) RM="$RM $arg" ;;
- *) files="$files $arg" ;;
- esac
- done
-
- test -z "$RM" && \
- func_fatal_help "you must specify an RM program"
-
- rmdirs=
-
- origobjdir="$objdir"
- for file in $files; do
- func_dirname "$file" "" "."
- dir="$func_dirname_result"
- if test "X$dir" = X.; then
- objdir="$origobjdir"
- else
- objdir="$dir/$origobjdir"
- fi
- func_basename "$file"
- name="$func_basename_result"
- test "$mode" = uninstall && objdir="$dir"
-
- # Remember objdir for removal later, being careful to avoid duplicates
- if test "$mode" = clean; then
- case " $rmdirs " in
- *" $objdir "*) ;;
- *) rmdirs="$rmdirs $objdir" ;;
- esac
- fi
-
- # Don't error if the file doesn't exist and rm -f was used.
- if { test -L "$file"; } >/dev/null 2>&1 ||
- { test -h "$file"; } >/dev/null 2>&1 ||
- test -f "$file"; then
- :
- elif test -d "$file"; then
- exit_status=1
- continue
- elif test "$rmforce" = yes; then
- continue
- fi
-
- rmfiles="$file"
-
- case $name in
- *.la)
- # Possibly a libtool archive, so verify it.
- if func_lalib_p "$file"; then
- func_source $dir/$name
-
- # Delete the libtool libraries and symlinks.
- for n in $library_names; do
- rmfiles="$rmfiles $objdir/$n"
- done
- test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
-
- case "$mode" in
- clean)
- case " $library_names " in
- # " " in the beginning catches empty $dlname
- *" $dlname "*) ;;
- *) rmfiles="$rmfiles $objdir/$dlname" ;;
- esac
- test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
- ;;
- uninstall)
- if test -n "$library_names"; then
- # Do each command in the postuninstall commands.
- func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
- fi
-
- if test -n "$old_library"; then
- # Do each command in the old_postuninstall commands.
- func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
- fi
- # FIXME: should reinstall the best remaining shared library.
- ;;
- esac
- fi
- ;;
-
- *.lo)
- # Possibly a libtool object, so verify it.
- if func_lalib_p "$file"; then
-
- # Read the .lo file
- func_source $dir/$name
-
- # Add PIC object to the list of files to remove.
- if test -n "$pic_object" &&
- test "$pic_object" != none; then
- rmfiles="$rmfiles $dir/$pic_object"
- fi
-
- # Add non-PIC object to the list of files to remove.
- if test -n "$non_pic_object" &&
- test "$non_pic_object" != none; then
- rmfiles="$rmfiles $dir/$non_pic_object"
- fi
- fi
- ;;
-
- *)
- if test "$mode" = clean ; then
- noexename=$name
- case $file in
- *.exe)
- func_stripname '' '.exe' "$file"
- file=$func_stripname_result
- func_stripname '' '.exe' "$name"
- noexename=$func_stripname_result
- # $file with .exe has already been added to rmfiles,
- # add $file without .exe
- rmfiles="$rmfiles $file"
- ;;
- esac
- # Do a test to see if this is a libtool program.
- if func_ltwrapper_p "$file"; then
- if func_ltwrapper_executable_p "$file"; then
- func_ltwrapper_scriptname "$file"
- relink_command=
- func_source $func_ltwrapper_scriptname_result
- rmfiles="$rmfiles $func_ltwrapper_scriptname_result"
- else
- relink_command=
- func_source $dir/$noexename
- fi
-
- # note $name still contains .exe if it was in $file originally
- # as does the version of $file that was added into $rmfiles
- rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}"
- if test "$fast_install" = yes && test -n "$relink_command"; then
- rmfiles="$rmfiles $objdir/lt-$name"
- fi
- if test "X$noexename" != "X$name" ; then
- rmfiles="$rmfiles $objdir/lt-${noexename}.c"
- fi
- fi
- fi
- ;;
- esac
- func_show_eval "$RM $rmfiles" 'exit_status=1'
- done
- objdir="$origobjdir"
-
- # Try to remove the ${objdir}s in the directories where we deleted files
- for dir in $rmdirs; do
- if test -d "$dir"; then
- func_show_eval "rmdir $dir >/dev/null 2>&1"
- fi
- done
-
- exit $exit_status
-}
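As a usage sketch (the libfoo names and the install path are made up for illustration), the clean and uninstall modes handled by this function are normally invoked with an explicit RM program, which the code above requires:

    # remove build-tree outputs, including the copies kept under the objdir
    ./libtool --mode=clean rm -f foo.lo libfoo.la
    # remove an installed libtool library together with its symlinks
    ./libtool --mode=uninstall rm -f /usr/local/lib/libfoo.la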
-
-{ test "$mode" = uninstall || test "$mode" = clean; } &&
- func_mode_uninstall ${1+"$@"}
-
-test -z "$mode" && {
- help="$generic_help"
- func_fatal_help "you must specify a MODE"
-}
-
-test -z "$exec_cmd" && \
- func_fatal_help "invalid operation mode \`$mode'"
-
-if test -n "$exec_cmd"; then
- eval exec "$exec_cmd"
- exit $EXIT_FAILURE
-fi
-
-exit $exit_status
-
-
-# The TAGs below are defined such that we never get into a situation
-# in which we disable both kinds of libraries. Given conflicting
-# choices, we go for a static library, which is the most portable,
-# since we can't tell whether shared libraries were disabled because
-# the user asked for that or because the platform doesn't support
-# them. This is particularly important on AIX, because we don't
-# support having both static and shared libraries enabled at the same
-# time on that platform, so we default to a shared-only configuration.
-# If a disable-shared tag is given, we'll fall back to a static-only
-# configuration. But we'll never go from static-only to shared-only.
-
-# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
-build_libtool_libs=no
-build_old_libs=yes
-# ### END LIBTOOL TAG CONFIG: disable-shared
-
-# ### BEGIN LIBTOOL TAG CONFIG: disable-static
-build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
-# ### END LIBTOOL TAG CONFIG: disable-static
-
-# Local Variables:
-# mode:shell-script
-# sh-indentation:2
-# End:
-# vi:sw=2
-
diff --git a/scripts/training/compact-rule-table/m4/boost.m4 b/scripts/training/compact-rule-table/m4/boost.m4
deleted file mode 100644
index 6a9294c70..000000000
--- a/scripts/training/compact-rule-table/m4/boost.m4
+++ /dev/null
@@ -1,1133 +0,0 @@
-# boost.m4: Locate Boost headers and libraries for autoconf-based projects.
-# Copyright (C) 2007, 2008, 2009, 2010, 2011 Benoit Sigoure <tsuna@lrde.epita.fr>
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Additional permission under section 7 of the GNU General Public
-# License, version 3 ("GPLv3"):
-#
-# If you convey this file as part of a work that contains a
-# configuration script generated by Autoconf, you may do so under
-# terms of your choice.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-m4_define([_BOOST_SERIAL], [m4_translit([
-# serial 16
-], [#
-], [])])
-
-# Original sources can be found at http://github.com/tsuna/boost.m4
-# You can fetch the latest version of the script by doing:
-# wget http://github.com/tsuna/boost.m4/raw/master/build-aux/boost.m4
-
-# ------ #
-# README #
-# ------ #
-
-# This file provides several macros to use the various Boost libraries.
-# The first macro is BOOST_REQUIRE. It will simply check if it's possible to
-# find the Boost headers of a given (optional) minimum version and it will
-# define BOOST_CPPFLAGS accordingly. It will add an option --with-boost to
-# your configure so that users can specify non standard locations.
-# If the user's environment contains BOOST_ROOT and --with-boost was not
-# specified, --with-boost=$BOOST_ROOT is implicitly used.
-# For more README and documentation, go to http://github.com/tsuna/boost.m4
-# Note: THESE MACROS ASSUME THAT YOU USE LIBTOOL. If you don't, don't worry,
-# simply read the README, it will show you what to do step by step.
-
-m4_pattern_forbid([^_?(BOOST|Boost)_])
-
-
-# _BOOST_SED_CPP(SED-PROGRAM, PROGRAM,
-# [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
-# --------------------------------------------------------
-# Same as AC_EGREP_CPP, but leave the result in conftest.i.
-#
-# SED-PROGRAM is *not* overquoted, as in AC_EGREP_CPP. It is expanded
-# in double-quotes, so escape your double quotes.
-#
-# It could be useful to turn this into a macro which extracts the
-# value of any macro.
-m4_define([_BOOST_SED_CPP],
-[AC_LANG_PREPROC_REQUIRE()dnl
-AC_REQUIRE([AC_PROG_SED])dnl
-AC_LANG_CONFTEST([AC_LANG_SOURCE([[$2]])])
-AS_IF([dnl eval is necessary to expand ac_cpp.
-dnl Ultrix and Pyramid sh refuse to redirect output of eval, so use subshell.
-dnl Beware of Windows end-of-lines, for instance if we are running
-dnl some Windows programs under Wine. In that case, boost/version.hpp
-dnl is certainly using "\r\n", but the regular Unix shell will only
-dnl strip `\n' with backquotes, not the `\r'. This results in
-dnl boost_cv_lib_version='1_37\r' for instance, which breaks
-dnl everything else.
-dnl Cannot use 'dnl' after [$4] because a trailing dnl may break AC_CACHE_CHECK
-(eval "$ac_cpp conftest.$ac_ext") 2>&AS_MESSAGE_LOG_FD |
- tr -d '\r' |
- $SED -n -e "$1" >conftest.i 2>&1],
- [$3],
- [$4])
-rm -rf conftest*
-])# AC_EGREP_CPP
-
-
-
-# BOOST_REQUIRE([VERSION], [ACTION-IF-NOT-FOUND])
-# -----------------------------------------------
-# Look for Boost. If version is given, it must either be a literal of the form
-# "X.Y.Z" where X, Y and Z are integers (the ".Z" part being optional) or a
-# variable "$var".
-# Defines the value BOOST_CPPFLAGS. This macro only checks for headers with
-# the required version, it does not check for any of the Boost libraries.
-# On success, defines HAVE_BOOST. On failure, calls the optional
-# ACTION-IF-NOT-FOUND action if one was supplied.
-# Otherwise aborts with an error message.
-AC_DEFUN([BOOST_REQUIRE],
-[AC_REQUIRE([AC_PROG_CXX])dnl
-AC_REQUIRE([AC_PROG_GREP])dnl
-echo "$as_me: this is boost.m4[]_BOOST_SERIAL" >&AS_MESSAGE_LOG_FD
-boost_save_IFS=$IFS
-boost_version_req=$1
-IFS=.
-set x $boost_version_req 0 0 0
-IFS=$boost_save_IFS
-shift
-boost_version_req=`expr "$[1]" '*' 100000 + "$[2]" '*' 100 + "$[3]"`
-boost_version_req_string=$[1].$[2].$[3]
-AC_ARG_WITH([boost],
- [AS_HELP_STRING([--with-boost=DIR],
- [prefix of Boost $1 @<:@guess@:>@])])dnl
-AC_ARG_VAR([BOOST_ROOT],[Location of Boost installation])dnl
-# If BOOST_ROOT is set and the user has not provided a value to
-# --with-boost, then treat BOOST_ROOT as if the user had supplied it.
-if test x"$BOOST_ROOT" != x; then
- if test x"$with_boost" = x; then
- AC_MSG_NOTICE([Detected BOOST_ROOT; continuing with --with-boost=$BOOST_ROOT])
- with_boost=$BOOST_ROOT
- else
- AC_MSG_NOTICE([Detected BOOST_ROOT=$BOOST_ROOT, but overridden by --with-boost=$with_boost])
- fi
-fi
-AC_SUBST([DISTCHECK_CONFIGURE_FLAGS],
- ["$DISTCHECK_CONFIGURE_FLAGS '--with-boost=$with_boost'"])dnl
-boost_save_CPPFLAGS=$CPPFLAGS
- AC_CACHE_CHECK([for Boost headers version >= $boost_version_req_string],
- [boost_cv_inc_path],
- [boost_cv_inc_path=no
-AC_LANG_PUSH([C++])dnl
-m4_pattern_allow([^BOOST_VERSION$])dnl
- AC_LANG_CONFTEST([AC_LANG_PROGRAM([[#include <boost/version.hpp>
-#if !defined BOOST_VERSION
-# error BOOST_VERSION is not defined
-#elif BOOST_VERSION < $boost_version_req
-# error Boost headers version < $boost_version_req
-#endif
-]])])
- # If the user provided a value to --with-boost, use it and only it.
- case $with_boost in #(
- ''|yes) set x '' /opt/local/include /usr/local/include /opt/include \
- /usr/include C:/Boost/include;; #(
- *) set x "$with_boost/include" "$with_boost";;
- esac
- shift
- for boost_dir
- do
- # Without --layout=system, Boost (or at least some versions) installs
- # itself in <prefix>/include/boost-<version>. This inner loop helps to
- # find headers in such directories.
- #
- # Any ${boost_dir}/boost-x_xx directories are searched in reverse version
- # order followed by ${boost_dir}. The final '.' is a sentinel for
-    # searching $boost_dir itself.  Entries are whitespace separated.
- #
- # I didn't indent this loop on purpose (to avoid over-indented code)
- boost_layout_system_search_list=`cd "$boost_dir" 2>/dev/null \
- && ls -1 | "${GREP}" '^boost-' | sort -rn -t- -k2 \
- && echo .`
- for boost_inc in $boost_layout_system_search_list
- do
- if test x"$boost_inc" != x.; then
- boost_inc="$boost_dir/$boost_inc"
- else
- boost_inc="$boost_dir" # Uses sentinel in boost_layout_system_search_list
- fi
- if test x"$boost_inc" != x; then
- # We are going to check whether the version of Boost installed
- # in $boost_inc is usable by running a compilation that
- # #includes it. But if we pass a -I/some/path in which Boost
- # is not installed, the compiler will just skip this -I and
- # use other locations (either from CPPFLAGS, or from its list
- # of system include directories). As a result we would use
- # header installed on the machine instead of the /some/path
- # specified by the user. So in that precise case (trying
- # $boost_inc), make sure the version.hpp exists.
- #
- # Use test -e as there can be symlinks.
- test -e "$boost_inc/boost/version.hpp" || continue
- CPPFLAGS="$CPPFLAGS -I$boost_inc"
- fi
- AC_COMPILE_IFELSE([], [boost_cv_inc_path=yes], [boost_cv_version=no])
- if test x"$boost_cv_inc_path" = xyes; then
- if test x"$boost_inc" != x; then
- boost_cv_inc_path=$boost_inc
- fi
- break 2
- fi
- done
- done
-AC_LANG_POP([C++])dnl
- ])
- case $boost_cv_inc_path in #(
- no)
- boost_errmsg="cannot find Boost headers version >= $boost_version_req_string"
- m4_if([$2], [], [AC_MSG_ERROR([$boost_errmsg])],
- [AC_MSG_NOTICE([$boost_errmsg])])
- $2
- ;;#(
- yes)
- BOOST_CPPFLAGS=
- ;;#(
- *)
- AC_SUBST([BOOST_CPPFLAGS], ["-I$boost_cv_inc_path"])dnl
- ;;
- esac
- if test x"$boost_cv_inc_path" != xno; then
- AC_DEFINE([HAVE_BOOST], [1],
- [Defined if the requested minimum BOOST version is satisfied])
- AC_CACHE_CHECK([for Boost's header version],
- [boost_cv_lib_version],
- [m4_pattern_allow([^BOOST_LIB_VERSION$])dnl
- _BOOST_SED_CPP([/^boost-lib-version = /{s///;s/\"//g;p;q;}],
- [#include <boost/version.hpp>
-boost-lib-version = BOOST_LIB_VERSION],
- [boost_cv_lib_version=`cat conftest.i`])])
- # e.g. "134" for 1_34_1 or "135" for 1_35
- boost_major_version=`echo "$boost_cv_lib_version" | sed 's/_//;s/_.*//'`
- case $boost_major_version in #(
- '' | *[[!0-9]]*)
- AC_MSG_ERROR([invalid value: boost_major_version=$boost_major_version])
- ;;
- esac
-fi
-CPPFLAGS=$boost_save_CPPFLAGS
-])# BOOST_REQUIRE
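A minimal configure.ac sketch of how BOOST_REQUIRE is meant to be called; the 1.36 minimum and the /opt/boost prefix are illustrative only:

    AC_INIT([myprog], [0.1])
    AC_PROG_CXX
    BOOST_REQUIRE([1.36])   # substitutes BOOST_CPPFLAGS and defines HAVE_BOOST on success

    # A non-standard installation can then be selected at configure time with
    #   ./configure --with-boost=/opt/boost
    # or implicitly through the BOOST_ROOT environment variable, as described above.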
-
-# BOOST_STATIC()
-# --------------
-# Add the "--enable-static-boost" configure argument. If this argument is given
-# on the command line, static versions of the libraries will be looked up.
-AC_DEFUN([BOOST_STATIC],
- [AC_ARG_ENABLE([static-boost],
- [AC_HELP_STRING([--enable-static-boost],
- [Prefer the static boost libraries over the shared ones [no]])],
- [enable_static_boost=yes],
- [enable_static_boost=no])])# BOOST_STATIC
-
-# BOOST_FIND_HEADER([HEADER-NAME], [ACTION-IF-NOT-FOUND], [ACTION-IF-FOUND])
-# --------------------------------------------------------------------------
-# Wrapper around AC_CHECK_HEADER for Boost headers. Useful to check for
-# some parts of the Boost library which are only made of headers and don't
-# require linking (such as Boost.Foreach).
-#
-# Default ACTION-IF-NOT-FOUND: Fail with a fatal error unless Boost couldn't be
-# found in the first place, in which case by default a notice is issued to the
-# user. Presumably if we haven't died already it's because it's OK to not have
-# Boost, which is why only a notice is issued instead of a hard error.
-#
-# Default ACTION-IF-FOUND: define the preprocessor symbol HAVE_<HEADER-NAME> in
-# case of success (where HEADER-NAME is written LIKE_THIS, e.g.,
-# HAVE_BOOST_FOREACH_HPP).
-AC_DEFUN([BOOST_FIND_HEADER],
-[AC_REQUIRE([BOOST_REQUIRE])dnl
-if test x"$boost_cv_inc_path" = xno; then
- m4_default([$2], [AC_MSG_NOTICE([Boost not available, not searching for $1])])
-else
-AC_LANG_PUSH([C++])dnl
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-AC_CHECK_HEADER([$1],
- [m4_default([$3], [AC_DEFINE(AS_TR_CPP([HAVE_$1]), [1],
- [Define to 1 if you have <$1>])])],
- [m4_default([$2], [AC_MSG_ERROR([cannot find $1])])])
-CPPFLAGS=$boost_save_CPPFLAGS
-AC_LANG_POP([C++])dnl
-fi
-])# BOOST_FIND_HEADER
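As a usage sketch, a header-only component is checked with a single call; boost/foreach.hpp is just the example header already used in the comment above:

    BOOST_FIND_HEADER([boost/foreach.hpp])   # defines HAVE_BOOST_FOREACH_HPP when the header compiles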
-
-
-# BOOST_FIND_LIB([LIB-NAME], [PREFERRED-RT-OPT], [HEADER-NAME], [CXX-TEST],
-# [CXX-PROLOGUE])
-# -------------------------------------------------------------------------
-# Look for the Boost library LIB-NAME (e.g., LIB-NAME = `thread', for
-# libboost_thread). Check that HEADER-NAME works and check that
-# libboost_LIB-NAME can link with the code CXX-TEST. The optional argument
-# CXX-PROLOGUE can be used to include some C++ code before the `main'
-# function.
-#
-# Invokes BOOST_FIND_HEADER([HEADER-NAME]) (see above).
-#
-# Boost libraries typically come compiled with several flavors (with different
-# runtime options) so PREFERRED-RT-OPT is the preferred suffix. A suffix is one
-# or more of the following letters: sgdpn (in that order). s = static
-# runtime, d = debug build, g = debug/diagnostic runtime, p = STLPort build,
-# n = (unsure) STLPort build without iostreams from STLPort (it looks like `n'
-# must always be used along with `p'). Additionally, PREFERRED-RT-OPT can
-# start with `mt-' to indicate that there is a preference for multi-thread
-# builds. Some sample values for PREFERRED-RT-OPT: (nothing), mt, d, mt-d, gdp
-# ... If you want to make sure you have a specific version of Boost
-# (eg, >= 1.33) you *must* invoke BOOST_REQUIRE before this macro.
-AC_DEFUN([BOOST_FIND_LIB],
-[AC_REQUIRE([BOOST_REQUIRE])dnl
-AC_REQUIRE([_BOOST_FIND_COMPILER_TAG])dnl
-AC_REQUIRE([BOOST_STATIC])dnl
-AC_REQUIRE([_BOOST_GUESS_WHETHER_TO_USE_MT])dnl
-if test x"$boost_cv_inc_path" = xno; then
- AC_MSG_NOTICE([Boost not available, not searching for the Boost $1 library])
-else
-dnl The else branch is huge and wasn't indented on purpose.
-AC_LANG_PUSH([C++])dnl
-AS_VAR_PUSHDEF([Boost_lib], [boost_cv_lib_$1])dnl
-AS_VAR_PUSHDEF([Boost_lib_LDFLAGS], [boost_cv_lib_$1_LDFLAGS])dnl
-AS_VAR_PUSHDEF([Boost_lib_LDPATH], [boost_cv_lib_$1_LDPATH])dnl
-AS_VAR_PUSHDEF([Boost_lib_LIBS], [boost_cv_lib_$1_LIBS])dnl
-BOOST_FIND_HEADER([$3])
-boost_save_CPPFLAGS=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
-# Now let's try to find the library. The algorithm is as follows: first look
-# for a given library name according to the user's PREFERRED-RT-OPT. For each
-# library name, we prefer to use the ones that carry the tag (toolset name).
-# Each library is searched through the various standard paths where Boost is
-# usually installed. If we can't find the standard variants, we try to
-# enforce -mt (for instance on MacOSX, libboost_threads.dylib doesn't exist
-# but there's -obviously- libboost_threads-mt.dylib).
-AC_CACHE_CHECK([for the Boost $1 library], [Boost_lib],
- [Boost_lib=no
- case "$2" in #(
- mt | mt-) boost_mt=-mt; boost_rtopt=;; #(
- mt* | mt-*) boost_mt=-mt; boost_rtopt=`expr "X$2" : 'Xmt-*\(.*\)'`;; #(
- *) boost_mt=; boost_rtopt=$2;;
- esac
- if test $enable_static_boost = yes; then
- boost_rtopt="s$boost_rtopt"
- fi
- # Find the proper debug variant depending on what we've been asked to find.
- case $boost_rtopt in #(
- *d*) boost_rt_d=$boost_rtopt;; #(
- *[[sgpn]]*) # Insert the `d' at the right place (in between `sg' and `pn')
- boost_rt_d=`echo "$boost_rtopt" | sed 's/\(s*g*\)\(p*n*\)/\1\2/'`;; #(
- *) boost_rt_d='-d';;
- esac
- # If the PREFERRED-RT-OPT are not empty, prepend a `-'.
- test -n "$boost_rtopt" && boost_rtopt="-$boost_rtopt"
- $boost_guess_use_mt && boost_mt=-mt
-  # Look for the abs path of the static archive.
-  # $libext is computed by Libtool but let's make sure it's non-empty.
- test -z "$libext" &&
- AC_MSG_ERROR([the libext variable is empty, did you invoke Libtool?])
- boost_save_ac_objext=$ac_objext
- # Generate the test file.
- AC_LANG_CONFTEST([AC_LANG_PROGRAM([#include <$3>
-$5], [$4])])
-dnl Optimization hacks: compiling C++ is slow, especially with Boost. What
-dnl we're trying to do here is guess the right combination of link flags
-dnl (LIBS / LDFLAGS) to use a given library. This can take several
-dnl iterations before it succeeds and is thus *very* slow. So what we do
-dnl instead is that we compile the code first (and thus get an object file,
-dnl typically conftest.o). Then we try various combinations of link flags
-dnl until we succeed to link conftest.o in an executable. The problem is
-dnl that the various TRY_LINK / COMPILE_IFELSE macros of Autoconf always
-dnl remove all the temporary files including conftest.o. So the trick here
-dnl is to temporarily change the value of ac_objext so that conftest.o is
-dnl preserved across tests. This is obviously fragile and I will burn in
-dnl hell for not respecting Autoconf's documented interfaces, but in the
-dnl meantime, it optimizes the macro by a factor of 5 to 30.
-dnl Another small optimization: the first argument of AC_COMPILE_IFELSE is left
-dnl empty because the test file is generated only once above (before we
-dnl start the for loops).
- AC_COMPILE_IFELSE([],
- [ac_objext=do_not_rm_me_plz],
- [AC_MSG_ERROR([cannot compile a test that uses Boost $1])])
- ac_objext=$boost_save_ac_objext
- boost_failed_libs=
-# Don't bother to indent the 6 nested for loops, only the 2 innermost ones
-# matter.
-for boost_tag_ in -$boost_cv_lib_tag ''; do
-for boost_ver_ in -$boost_cv_lib_version ''; do
-for boost_mt_ in $boost_mt -mt ''; do
-for boost_rtopt_ in $boost_rtopt '' -d; do
- for boost_lib in \
- boost_$1$boost_tag_$boost_mt_$boost_rtopt_$boost_ver_ \
- boost_$1$boost_tag_$boost_rtopt_$boost_ver_ \
- boost_$1$boost_tag_$boost_mt_$boost_ver_ \
- boost_$1$boost_tag_$boost_ver_
- do
- # Avoid testing twice the same lib
- case $boost_failed_libs in #(
- *@$boost_lib@*) continue;;
- esac
-    # If with_boost is empty, we'll search in /lib first, which is not quite
-    # right, so instead we'll try a location based on where the headers are.
- boost_tmp_lib=$with_boost
- test x"$with_boost" = x && boost_tmp_lib=${boost_cv_inc_path%/include}
- for boost_ldpath in "$boost_tmp_lib/lib" '' \
- /opt/local/lib* /usr/local/lib* /opt/lib* /usr/lib* \
- "$with_boost" C:/Boost/lib /lib*
- do
- test -e "$boost_ldpath" || continue
- boost_save_LDFLAGS=$LDFLAGS
- # Are we looking for a static library?
- case $boost_ldpath:$boost_rtopt_ in #(
- *?*:*s*) # Yes (Non empty boost_ldpath + s in rt opt)
- Boost_lib_LIBS="$boost_ldpath/lib$boost_lib.$libext"
- test -e "$Boost_lib_LIBS" || continue;; #(
- *) # No: use -lboost_foo to find the shared library.
- Boost_lib_LIBS="-l$boost_lib";;
- esac
- boost_save_LIBS=$LIBS
- LIBS="$Boost_lib_LIBS $LIBS"
- test x"$boost_ldpath" != x && LDFLAGS="$LDFLAGS -L$boost_ldpath"
-dnl First argument of AC_LINK_IFELSE left empty because the test file is
-dnl generated only once above (before we start the for loops).
- _BOOST_AC_LINK_IFELSE([],
- [Boost_lib=yes], [Boost_lib=no])
- ac_objext=$boost_save_ac_objext
- LDFLAGS=$boost_save_LDFLAGS
- LIBS=$boost_save_LIBS
- if test x"$Boost_lib" = xyes; then
- Boost_lib_LDFLAGS="-L$boost_ldpath -Wl,-R$boost_ldpath"
- Boost_lib_LDPATH="$boost_ldpath"
- break 6
- else
- boost_failed_libs="$boost_failed_libs@$boost_lib@"
- fi
- done
- done
-done
-done
-done
-done
-rm -f conftest.$ac_objext
-])
-case $Boost_lib in #(
- no) _AC_MSG_LOG_CONFTEST
- AC_MSG_ERROR([cannot find the flags to link with Boost $1])
- ;;
-esac
-AC_SUBST(AS_TR_CPP([BOOST_$1_LDFLAGS]), [$Boost_lib_LDFLAGS])dnl
-AC_SUBST(AS_TR_CPP([BOOST_$1_LDPATH]), [$Boost_lib_LDPATH])dnl
-AC_SUBST([BOOST_LDPATH], [$Boost_lib_LDPATH])dnl
-AC_SUBST(AS_TR_CPP([BOOST_$1_LIBS]), [$Boost_lib_LIBS])dnl
-CPPFLAGS=$boost_save_CPPFLAGS
-AS_VAR_POPDEF([Boost_lib])dnl
-AS_VAR_POPDEF([Boost_lib_LDFLAGS])dnl
-AS_VAR_POPDEF([Boost_lib_LDPATH])dnl
-AS_VAR_POPDEF([Boost_lib_LIBS])dnl
-AC_LANG_POP([C++])dnl
-fi
-])# BOOST_FIND_LIB
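To make the PREFERRED-RT-OPT argument concrete, a hedged configure.ac sketch using the per-library wrappers defined below; the chosen suffixes are examples, not recommendations:

    BOOST_REQUIRE([1.36])
    BOOST_THREADS([mt])     # prefer a multi-thread flavour such as libboost_thread-mt
    BOOST_REGEX([mt-d])     # prefer a multi-thread debug build such as libboost_regex-mt-d
    # each call substitutes BOOST_<LIB>_LDFLAGS, BOOST_<LIB>_LDPATH and BOOST_<LIB>_LIBS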
-
-
-# --------------------------------------- #
-# Checks for the various Boost libraries. #
-# --------------------------------------- #
-
-# List of boost libraries: http://www.boost.org/libs/libraries.htm
-# The page http://beta.boost.org/doc/libs is useful: it gives the first release
-# version of each library (among other things).
-
-# BOOST_DEFUN(LIBRARY, CODE)
-# --------------------------
-# Define BOOST_<LIBRARY-UPPERCASE> as a macro that runs CODE.
-#
-# Use indir to avoid the warning on underquoted macro name given to AC_DEFUN.
-m4_define([BOOST_DEFUN],
-[m4_indir([AC_DEFUN],
- m4_toupper([BOOST_$1]),
-[m4_pushdef([BOOST_Library], [$1])dnl
-$2
-m4_popdef([BOOST_Library])dnl
-])
-])
-
-# BOOST_ARRAY()
-# -------------
-# Look for Boost.Array
-BOOST_DEFUN([Array],
-[BOOST_FIND_HEADER([boost/array.hpp])])
-
-
-# BOOST_ASIO()
-# ------------
-# Look for Boost.Asio (new in Boost 1.35).
-BOOST_DEFUN([Asio],
-[AC_REQUIRE([BOOST_SYSTEM])dnl
-BOOST_FIND_HEADER([boost/asio.hpp])])
-
-
-# BOOST_BIND()
-# ------------
-# Look for Boost.Bind
-BOOST_DEFUN([Bind],
-[BOOST_FIND_HEADER([boost/bind.hpp])])
-
-
-# BOOST_CONVERSION()
-# ------------------
-# Look for Boost.Conversion (cast / lexical_cast)
-BOOST_DEFUN([Conversion],
-[BOOST_FIND_HEADER([boost/cast.hpp])
-BOOST_FIND_HEADER([boost/lexical_cast.hpp])
-])# BOOST_CONVERSION
-
-
-# BOOST_DATE_TIME([PREFERRED-RT-OPT])
-# -----------------------------------
-# Look for Boost.Date_Time. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Date_Time],
-[BOOST_FIND_LIB([date_time], [$1],
- [boost/date_time/posix_time/posix_time.hpp],
- [boost::posix_time::ptime t;])
-])# BOOST_DATE_TIME
-
-
-# BOOST_FILESYSTEM([PREFERRED-RT-OPT])
-# ------------------------------------
-# Look for Boost.Filesystem. For the documentation of PREFERRED-RT-OPT, see
-# the documentation of BOOST_FIND_LIB above.
-# Do not check for boost/filesystem.hpp because this file was introduced in
-# 1.34.
-BOOST_DEFUN([Filesystem],
-[# Do we have to check for Boost.System? This link-time dependency was
-# added as of 1.35.0. If we have a version <1.35, we must not attempt to
-# find Boost.System as it didn't exist by then.
-if test $boost_major_version -ge 135; then
-BOOST_SYSTEM([$1])
-fi # end of the Boost.System check.
-boost_filesystem_save_LIBS=$LIBS
-boost_filesystem_save_LDFLAGS=$LDFLAGS
-m4_pattern_allow([^BOOST_SYSTEM_(LIBS|LDFLAGS)$])dnl
-LIBS="$LIBS $BOOST_SYSTEM_LIBS"
-LDFLAGS="$LDFLAGS $BOOST_SYSTEM_LDFLAGS"
-BOOST_FIND_LIB([filesystem], [$1],
- [boost/filesystem/path.hpp], [boost::filesystem::path p;])
-if test $enable_static_boost = yes && test $boost_major_version -ge 135; then
- AC_SUBST([BOOST_FILESYSTEM_LIBS], ["$BOOST_FILESYSTEM_LIBS $BOOST_SYSTEM_LIBS"])
-fi
-LIBS=$boost_filesystem_save_LIBS
-LDFLAGS=$boost_filesystem_save_LDFLAGS
-])# BOOST_FILESYSTEM
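One way to consume the substituted variables, sketched directly in configure.ac (a Makefile.am can reference the same values through @BOOST_FILESYSTEM_LIBS@ and friends):

    BOOST_FILESYSTEM([mt])
    CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
    LDFLAGS="$LDFLAGS $BOOST_FILESYSTEM_LDFLAGS"
    LIBS="$LIBS $BOOST_FILESYSTEM_LIBS $BOOST_SYSTEM_LIBS"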
-
-
-# BOOST_FOREACH()
-# ---------------
-# Look for Boost.Foreach
-BOOST_DEFUN([Foreach],
-[BOOST_FIND_HEADER([boost/foreach.hpp])])
-
-
-# BOOST_FORMAT()
-# --------------
-# Look for Boost.Format
-# Note: we can't check for boost/format/format_fwd.hpp because the header isn't
-# standalone. It can't be compiled because it triggers the following error:
-# boost/format/detail/config_macros.hpp:88: error: 'locale' in namespace 'std'
-# does not name a type
-BOOST_DEFUN([Format],
-[BOOST_FIND_HEADER([boost/format.hpp])])
-
-
-# BOOST_FUNCTION()
-# ----------------
-# Look for Boost.Function
-BOOST_DEFUN([Function],
-[BOOST_FIND_HEADER([boost/function.hpp])])
-
-
-# BOOST_GRAPH([PREFERRED-RT-OPT])
-# -------------------------------
-# Look for Boost.Graphs. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Graph],
-[BOOST_FIND_LIB([graph], [$1],
- [boost/graph/adjacency_list.hpp], [boost::adjacency_list<> g;])
-])# BOOST_GRAPH
-
-
-# BOOST_IOSTREAMS([PREFERRED-RT-OPT])
-# -----------------------------------
-# Look for Boost.IOStreams. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([IOStreams],
-[BOOST_FIND_LIB([iostreams], [$1],
- [boost/iostreams/device/file_descriptor.hpp],
- [boost::iostreams::file_descriptor fd; fd.close();])
-])# BOOST_IOSTREAMS
-
-
-# BOOST_HASH()
-# ------------
-# Look for Boost.Functional/Hash
-BOOST_DEFUN([Hash],
-[BOOST_FIND_HEADER([boost/functional/hash.hpp])])
-
-
-# BOOST_LAMBDA()
-# --------------
-# Look for Boost.Lambda
-BOOST_DEFUN([Lambda],
-[BOOST_FIND_HEADER([boost/lambda/lambda.hpp])])
-
-
-# BOOST_LOG([PREFERRED-RT-OPT])
-# -----------------------------
-# Look for Boost.Log. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Log],
-[BOOST_FIND_LIB([log], [$1],
- [boost/log/core/core.hpp],
- [boost::log::attribute a; a.get_value();])
-])# BOOST_LOG
-
-
-# BOOST_LOG_SETUP([PREFERRED-RT-OPT])
-# -----------------------------------
-# Look for Boost.Log. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Log_Setup],
-[AC_REQUIRE([BOOST_LOG])dnl
-BOOST_FIND_LIB([log_setup], [$1],
- [boost/log/utility/init/from_settings.hpp],
- [boost::log::basic_settings<char> bs; bs.empty();])
-])# BOOST_LOG_SETUP
-
-
-# BOOST_MATH()
-# ------------
-# Look for Boost.Math
-# TODO: This library isn't header-only but it comes in multiple different
-# flavors that don't play well with BOOST_FIND_LIB (e.g, libboost_math_c99,
-# libboost_math_c99f, libboost_math_c99l, libboost_math_tr1,
-# libboost_math_tr1f, libboost_math_tr1l). This macro must be fixed to do the
-# right thing anyway.
-BOOST_DEFUN([Math],
-[BOOST_FIND_HEADER([boost/math/special_functions.hpp])])
-
-
-# BOOST_MULTIARRAY()
-# ------------------
-# Look for Boost.MultiArray
-BOOST_DEFUN([MultiArray],
-[BOOST_FIND_HEADER([boost/multi_array.hpp])])
-
-
-# BOOST_NUMERIC_CONVERSION()
-# --------------------------
-# Look for Boost.NumericConversion (policy-based numeric conversion)
-BOOST_DEFUN([Numeric_Conversion],
-[BOOST_FIND_HEADER([boost/numeric/conversion/converter.hpp])
-])# BOOST_NUMERIC_CONVERSION
-
-
-# BOOST_OPTIONAL()
-# ----------------
-# Look for Boost.Optional
-BOOST_DEFUN([Optional],
-[BOOST_FIND_HEADER([boost/optional.hpp])])
-
-
-# BOOST_PREPROCESSOR()
-# --------------------
-# Look for Boost.Preprocessor
-BOOST_DEFUN([Preprocessor],
-[BOOST_FIND_HEADER([boost/preprocessor/repeat.hpp])])
-
-
-# BOOST_UNORDERED()
-# -----------------
-# Look for Boost.Unordered
-BOOST_DEFUN([Unordered],
-[BOOST_FIND_HEADER([boost/unordered_map.hpp])])
-
-
-# BOOST_UUID()
-# ------------
-# Look for Boost.Uuid
-BOOST_DEFUN([Uuid],
-[BOOST_FIND_HEADER([boost/uuid/uuid.hpp])])
-
-
-# BOOST_PROGRAM_OPTIONS([PREFERRED-RT-OPT])
-# -----------------------------------------
-# Look for Boost.Program_options. For the documentation of PREFERRED-RT-OPT,
-# see the documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Program_Options],
-[BOOST_FIND_LIB([program_options], [$1],
- [boost/program_options.hpp],
- [boost::program_options::options_description d("test");])
-])# BOOST_PROGRAM_OPTIONS
-
-
-
-# _BOOST_PYTHON_CONFIG(VARIABLE, FLAG)
-# ------------------------------------
-# Save VARIABLE, and define it via `python-config --FLAG`.
-# Substitute BOOST_PYTHON_VARIABLE.
-m4_define([_BOOST_PYTHON_CONFIG],
-[AC_SUBST([BOOST_PYTHON_$1],
- [`python-config --$2 2>/dev/null`])dnl
-boost_python_save_$1=$$1
-$1="$$1 $BOOST_PYTHON_$1"])
-
-
-# BOOST_PYTHON([PREFERRED-RT-OPT])
-# --------------------------------
-# Look for Boost.Python. For the documentation of PREFERRED-RT-OPT,
-# see the documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Python],
-[_BOOST_PYTHON_CONFIG([CPPFLAGS], [includes])
-_BOOST_PYTHON_CONFIG([LDFLAGS], [ldflags])
-_BOOST_PYTHON_CONFIG([LIBS], [libs])
-m4_pattern_allow([^BOOST_PYTHON_MODULE$])dnl
-BOOST_FIND_LIB([python], [$1],
- [boost/python.hpp],
- [], [BOOST_PYTHON_MODULE(empty) {}])
-CPPFLAGS=$boost_python_save_CPPFLAGS
-LDFLAGS=$boost_python_save_LDFLAGS
-LIBS=$boost_python_save_LIBS
-])# BOOST_PYTHON
-
-
-# BOOST_REF()
-# -----------
-# Look for Boost.Ref
-BOOST_DEFUN([Ref],
-[BOOST_FIND_HEADER([boost/ref.hpp])])
-
-
-# BOOST_REGEX([PREFERRED-RT-OPT])
-# -------------------------------
-# Look for Boost.Regex. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Regex],
-[BOOST_FIND_LIB([regex], [$1],
- [boost/regex.hpp],
- [boost::regex exp("*"); boost::regex_match("foo", exp);])
-])# BOOST_REGEX
-
-
-# BOOST_SERIALIZATION([PREFERRED-RT-OPT])
-# ---------------------------------------
-# Look for Boost.Serialization. For the documentation of PREFERRED-RT-OPT, see
-# the documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Serialization],
-[BOOST_FIND_LIB([serialization], [$1],
- [boost/archive/text_oarchive.hpp],
- [std::ostream* o = 0; // Cheap way to get an ostream...
- boost::archive::text_oarchive t(*o);])
-])# BOOST_SERIALIZATION
-
-
-# BOOST_SIGNALS([PREFERRED-RT-OPT])
-# ---------------------------------
-# Look for Boost.Signals. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Signals],
-[BOOST_FIND_LIB([signals], [$1],
- [boost/signal.hpp],
- [boost::signal<void ()> s;])
-])# BOOST_SIGNALS
-
-
-# BOOST_SMART_PTR()
-# -----------------
-# Look for Boost.SmartPtr
-BOOST_DEFUN([Smart_Ptr],
-[BOOST_FIND_HEADER([boost/scoped_ptr.hpp])
-BOOST_FIND_HEADER([boost/shared_ptr.hpp])
-])
-
-
-# BOOST_STATICASSERT()
-# --------------------
-# Look for Boost.StaticAssert
-BOOST_DEFUN([StaticAssert],
-[BOOST_FIND_HEADER([boost/static_assert.hpp])])
-
-
-# BOOST_STRING_ALGO()
-# -------------------
-# Look for Boost.StringAlgo
-BOOST_DEFUN([String_Algo],
-[BOOST_FIND_HEADER([boost/algorithm/string.hpp])
-])
-
-
-# BOOST_SYSTEM([PREFERRED-RT-OPT])
-# --------------------------------
-# Look for Boost.System. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above. This library was introduced in Boost
-# 1.35.0.
-BOOST_DEFUN([System],
-[BOOST_FIND_LIB([system], [$1],
- [boost/system/error_code.hpp],
- [boost::system::error_code e; e.clear();])
-])# BOOST_SYSTEM
-
-
-# BOOST_TEST([PREFERRED-RT-OPT])
-# ------------------------------
-# Look for Boost.Test. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Test],
-[m4_pattern_allow([^BOOST_CHECK$])dnl
-BOOST_FIND_LIB([unit_test_framework], [$1],
- [boost/test/unit_test.hpp], [BOOST_CHECK(2 == 2);],
- [using boost::unit_test::test_suite;
- test_suite* init_unit_test_suite(int argc, char ** argv)
- { return NULL; }])
-])# BOOST_TEST
-
-
-# BOOST_THREADS([PREFERRED-RT-OPT])
-# ---------------------------------
-# Look for Boost.Thread. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-# FIXME: Provide an alias "BOOST_THREAD".
-BOOST_DEFUN([Threads],
-[dnl Having the pthread flag is required at least on GCC3 where
-dnl boost/thread.hpp would complain if we try to compile without
-dnl -pthread on GNU/Linux.
-AC_REQUIRE([_BOOST_PTHREAD_FLAG])dnl
-boost_threads_save_LIBS=$LIBS
-boost_threads_save_CPPFLAGS=$CPPFLAGS
-LIBS="$LIBS $boost_cv_pthread_flag"
-# Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
-# boost/thread.hpp will trigger a #error if -pthread isn't used:
-# boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
-# is not turned on. Please set the correct command line options for
-# threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
-CPPFLAGS="$CPPFLAGS $boost_cv_pthread_flag"
-BOOST_FIND_LIB([thread], [$1],
- [boost/thread.hpp], [boost::thread t; boost::mutex m;])
-BOOST_THREAD_LIBS="$BOOST_THREAD_LIBS $boost_cv_pthread_flag"
-BOOST_CPPFLAGS="$BOOST_CPPFLAGS $boost_cv_pthread_flag"
-LIBS=$boost_threads_save_LIBS
-CPPFLAGS=$boost_threads_save_CPPFLAGS
-])# BOOST_THREADS
-
-
-# BOOST_TOKENIZER()
-# -----------------
-# Look for Boost.Tokenizer
-BOOST_DEFUN([Tokenizer],
-[BOOST_FIND_HEADER([boost/tokenizer.hpp])])
-
-
-# BOOST_TRIBOOL()
-# ---------------
-# Look for Boost.Tribool
-BOOST_DEFUN([Tribool],
-[BOOST_FIND_HEADER([boost/logic/tribool_fwd.hpp])
-BOOST_FIND_HEADER([boost/logic/tribool.hpp])
-])
-
-
-# BOOST_TUPLE()
-# -------------
-# Look for Boost.Tuple
-BOOST_DEFUN([Tuple],
-[BOOST_FIND_HEADER([boost/tuple/tuple.hpp])])
-
-
-# BOOST_TYPETRAITS()
-# --------------------
-# Look for Boost.TypeTraits
-BOOST_DEFUN([TypeTraits],
-[BOOST_FIND_HEADER([boost/type_traits.hpp])])
-
-
-# BOOST_UTILITY()
-# ---------------
-# Look for Boost.Utility (noncopyable, result_of, base-from-member idiom,
-# etc.)
-BOOST_DEFUN([Utility],
-[BOOST_FIND_HEADER([boost/utility.hpp])])
-
-
-# BOOST_VARIANT()
-# ---------------
-# Look for Boost.Variant.
-BOOST_DEFUN([Variant],
-[BOOST_FIND_HEADER([boost/variant/variant_fwd.hpp])
-BOOST_FIND_HEADER([boost/variant.hpp])])
-
-
-# BOOST_WAVE([PREFERRED-RT-OPT])
-# ------------------------------
-# NOTE: If you intend to use Wave/Spirit with thread support, make sure you
-# call BOOST_THREADS first.
-# Look for Boost.Wave. For the documentation of PREFERRED-RT-OPT, see the
-# documentation of BOOST_FIND_LIB above.
-BOOST_DEFUN([Wave],
-[AC_REQUIRE([BOOST_FILESYSTEM])dnl
-AC_REQUIRE([BOOST_DATE_TIME])dnl
-boost_wave_save_LIBS=$LIBS
-boost_wave_save_LDFLAGS=$LDFLAGS
-m4_pattern_allow([^BOOST_((FILE)?SYSTEM|DATE_TIME|THREAD)_(LIBS|LDFLAGS)$])dnl
-LIBS="$LIBS $BOOST_SYSTEM_LIBS $BOOST_FILESYSTEM_LIBS $BOOST_DATE_TIME_LIBS\
-$BOOST_THREAD_LIBS"
-LDFLAGS="$LDFLAGS $BOOST_SYSTEM_LDFLAGS $BOOST_FILESYSTEM_LDFLAGS\
-$BOOST_DATE_TIME_LDFLAGS $BOOST_THREAD_LDFLAGS"
-BOOST_FIND_LIB([wave], [$1],
- [boost/wave.hpp],
- [boost::wave::token_id id; get_token_name(id);])
-LIBS=$boost_wave_save_LIBS
-LDFLAGS=$boost_wave_save_LDFLAGS
-])# BOOST_WAVE
-
-
-# BOOST_XPRESSIVE()
-# -----------------
-# Look for Boost.Xpressive (new since 1.36.0).
-BOOST_DEFUN([Xpressive],
-[BOOST_FIND_HEADER([boost/xpressive/xpressive.hpp])])
-
-
-# ----------------- #
-# Internal helpers. #
-# ----------------- #
-
-
-# _BOOST_PTHREAD_FLAG()
-# ---------------------
-# Internal helper for BOOST_THREADS. Based on ACX_PTHREAD:
-# http://autoconf-archive.cryp.to/acx_pthread.html
-AC_DEFUN([_BOOST_PTHREAD_FLAG],
-[AC_REQUIRE([AC_PROG_CXX])dnl
-AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_LANG_PUSH([C++])dnl
-AC_CACHE_CHECK([for the flags needed to use pthreads], [boost_cv_pthread_flag],
-[ boost_cv_pthread_flag=
- # The ordering *is* (sometimes) important. Some notes on the
- # individual items follow:
- # (none): in case threads are in libc; should be tried before -Kthread and
- # other compiler flags to prevent continual compiler warnings
- # -lpthreads: AIX (must check this before -lpthread)
- # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
- # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
- # -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
- # -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
- # -pthreads: Solaris/GCC
- # -mthreads: MinGW32/GCC, Lynx/GCC
- # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
- # doesn't hurt to check since this sometimes defines pthreads too;
- # also defines -D_REENTRANT)
- # ... -mt is also the pthreads flag for HP/aCC
- # -lpthread: GNU Linux, etc.
- # --thread-safe: KAI C++
- case $host_os in #(
- *solaris*)
- # On Solaris (at least, for some versions), libc contains stubbed
- # (non-functional) versions of the pthreads routines, so link-based
- # tests will erroneously succeed. (We need to link with -pthreads/-mt/
- # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
- # a function called by this macro, so we could check for that, but
- # who knows whether they'll stub that too in a future libc.) So,
- # we'll just look for -pthreads and -lpthread first:
- boost_pthread_flags="-pthreads -lpthread -mt -pthread";; #(
- *)
- boost_pthread_flags="-lpthreads -Kthread -kthread -llthread -pthread \
- -pthreads -mthreads -lpthread --thread-safe -mt";;
- esac
- # Generate the test file.
- AC_LANG_CONFTEST([AC_LANG_PROGRAM([#include <pthread.h>],
- [pthread_t th; pthread_join(th, 0);
- pthread_attr_init(0); pthread_cleanup_push(0, 0);
- pthread_create(0,0,0,0); pthread_cleanup_pop(0);])])
- for boost_pthread_flag in '' $boost_pthread_flags; do
- boost_pthread_ok=false
-dnl Re-use the test file already generated.
- boost_pthreads__save_LIBS=$LIBS
- LIBS="$LIBS $boost_pthread_flag"
- AC_LINK_IFELSE([],
- [if grep ".*$boost_pthread_flag" conftest.err; then
- echo "This flag seems to have triggered warnings" >&AS_MESSAGE_LOG_FD
- else
- boost_pthread_ok=:; boost_cv_pthread_flag=$boost_pthread_flag
- fi])
- LIBS=$boost_pthreads__save_LIBS
- $boost_pthread_ok && break
- done
-])
-AC_LANG_POP([C++])dnl
-])# _BOOST_PTHREAD_FLAG
-
-
-# _BOOST_gcc_test(MAJOR, MINOR)
-# -----------------------------
-# Internal helper for _BOOST_FIND_COMPILER_TAG.
-m4_define([_BOOST_gcc_test],
-["defined __GNUC__ && __GNUC__ == $1 && __GNUC_MINOR__ == $2 && !defined __ICC @ gcc$1$2"])dnl
-
-
-# _BOOST_FIND_COMPILER_TAG()
-# --------------------------
-# Internal. When Boost is installed without --layout=system, each library
-# filename will hold a suffix that encodes the compiler used during the
-# build. The Boost build system seems to call this a `tag'.
-AC_DEFUN([_BOOST_FIND_COMPILER_TAG],
-[AC_REQUIRE([AC_PROG_CXX])dnl
-AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_CACHE_CHECK([for the toolset name used by Boost for $CXX], [boost_cv_lib_tag],
-[boost_cv_lib_tag=unknown
-if test x$boost_cv_inc_path != xno; then
- AC_LANG_PUSH([C++])dnl
- # The following tests are mostly inspired by boost/config/auto_link.hpp
-  # The list is sorted from most recent/common to oldest compiler (in order
-  # to increase the likelihood of finding the right compiler with the
-  # least number of compilation attempts).
-  # Beware that some tests are sensitive to the order (for instance, we must
- # look for MinGW before looking for GCC3).
- # I used one compilation test per compiler with a #error to recognize
- # each compiler so that it works even when cross-compiling (let me know
- # if you know a better approach).
- # Known missing tags (known from Boost's tools/build/v2/tools/common.jam):
- # como, edg, kcc, bck, mp, sw, tru, xlc
- # I'm not sure about my test for `il' (be careful: Intel's ICC pre-defines
- # the same defines as GCC's).
- for i in \
- _BOOST_gcc_test(4, 6) \
- _BOOST_gcc_test(4, 5) \
- _BOOST_gcc_test(4, 4) \
- _BOOST_gcc_test(4, 3) \
- _BOOST_gcc_test(4, 2) \
- _BOOST_gcc_test(4, 1) \
- _BOOST_gcc_test(4, 0) \
- "defined __GNUC__ && __GNUC__ == 3 && !defined __ICC \
- && (defined WIN32 || defined WINNT || defined _WIN32 || defined __WIN32 \
- || defined __WIN32__ || defined __WINNT || defined __WINNT__) @ mgw" \
- _BOOST_gcc_test(3, 4) \
- _BOOST_gcc_test(3, 3) \
- "defined _MSC_VER && _MSC_VER >= 1500 @ vc90" \
- "defined _MSC_VER && _MSC_VER == 1400 @ vc80" \
- _BOOST_gcc_test(3, 2) \
- "defined _MSC_VER && _MSC_VER == 1310 @ vc71" \
- _BOOST_gcc_test(3, 1) \
- _BOOST_gcc_test(3, 0) \
- "defined __BORLANDC__ @ bcb" \
- "defined __ICC && (defined __unix || defined __unix__) @ il" \
- "defined __ICL @ iw" \
- "defined _MSC_VER && _MSC_VER == 1300 @ vc7" \
- _BOOST_gcc_test(2, 95) \
- "defined __MWERKS__ && __MWERKS__ <= 0x32FF @ cw9" \
- "defined _MSC_VER && _MSC_VER < 1300 && !defined UNDER_CE @ vc6" \
- "defined _MSC_VER && _MSC_VER < 1300 && defined UNDER_CE @ evc4" \
- "defined __MWERKS__ && __MWERKS__ <= 0x31FF @ cw8"
- do
- boost_tag_test=`expr "X$i" : 'X\([[^@]]*\) @ '`
- boost_tag=`expr "X$i" : 'X[[^@]]* @ \(.*\)'`
- AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#if $boost_tag_test
-/* OK */
-#else
-# error $boost_tag_test
-#endif
-]])], [boost_cv_lib_tag=$boost_tag; break], [])
- done
-AC_LANG_POP([C++])dnl
- case $boost_cv_lib_tag in #(
- # Some newer (>= 1.35?) versions of Boost seem to only use "gcc" as opposed
- # to "gcc41" for instance.
- *-gcc | *'-gcc ') :;; #( Don't re-add -gcc: it's already in there.
- gcc*)
- boost_tag_x=
- case $host_os in #(
- darwin*)
- if test $boost_major_version -ge 136; then
- # The `x' added in r46793 of Boost.
- boost_tag_x=x
- fi;;
- esac
- # We can specify multiple tags in this variable because it's used by
- # BOOST_FIND_LIB that does a `for tag in -$boost_cv_lib_tag' ...
- boost_cv_lib_tag="$boost_tag_x$boost_cv_lib_tag -${boost_tag_x}gcc"
- ;; #(
- unknown)
- AC_MSG_WARN([[could not figure out which toolset name to use for $CXX]])
- boost_cv_lib_tag=
- ;;
- esac
-fi])dnl end of AC_CACHE_CHECK
-])# _BOOST_FIND_COMPILER_TAG
-
-
-# _BOOST_GUESS_WHETHER_TO_USE_MT()
-# --------------------------------
-# Compile a small test to try to guess whether we should favor MT (Multi
-# Thread) flavors of Boost. Sets boost_guess_use_mt accordingly.
-AC_DEFUN([_BOOST_GUESS_WHETHER_TO_USE_MT],
-[# Check whether we'd do better to use `mt' even though we weren't asked to.
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#if defined _REENTRANT || defined _MT || defined __MT__
-/* use -mt */
-#else
-# error MT not needed
-#endif
-]])], [boost_guess_use_mt=:], [boost_guess_use_mt=false])
-])
-
-# _BOOST_AC_LINK_IFELSE(PROGRAM, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
-# -------------------------------------------------------------------
-# Fork of _AC_LINK_IFELSE that preserves conftest.o across calls. Fragile,
-# will break when Autoconf changes its internals. Requires that you manually
-# rm -f conftest.$ac_objext in between two really different tests, otherwise
-# you will try to link a conftest.o left behind by a previous test.
-# Used to aggressively optimize BOOST_FIND_LIB (see the big comment in this
-# macro).
-#
-# Don't use "break" in the actions, as it would short-circuit some code
-# this macro runs after the actions.
-m4_define([_BOOST_AC_LINK_IFELSE],
-[m4_ifvaln([$1], [AC_LANG_CONFTEST([$1])])dnl
-rm -f conftest$ac_exeext
-boost_save_ac_ext=$ac_ext
-boost_use_source=:
-# If we already have a .o, re-use it. We change $ac_ext so that $ac_link
-# tries to link the existing object file instead of compiling from source.
-test -f conftest.$ac_objext && ac_ext=$ac_objext && boost_use_source=false &&
- _AS_ECHO_LOG([re-using the existing conftest.$ac_objext])
-AS_IF([_AC_DO_STDERR($ac_link) && {
- test -z "$ac_[]_AC_LANG_ABBREV[]_werror_flag" ||
- test ! -s conftest.err
- } && test -s conftest$ac_exeext && {
- test "$cross_compiling" = yes ||
- $as_executable_p conftest$ac_exeext
-dnl FIXME: use AS_TEST_X instead when 2.61 is widespread enough.
- }],
- [$2],
- [if $boost_use_source; then
- _AC_MSG_LOG_CONFTEST
- fi
- $3])
-ac_objext=$boost_save_ac_objext
-ac_ext=$boost_save_ac_ext
-dnl Delete also the IPA/IPO (Inter Procedural Analysis/Optimization)
-dnl information created by the PGI compiler (conftest_ipa8_conftest.oo),
-dnl as it would interfere with the next link command.
-rm -f core conftest.err conftest_ipa8_conftest.oo \
- conftest$ac_exeext m4_ifval([$1], [conftest.$ac_ext])[]dnl
-])# _BOOST_AC_LINK_IFELSE
-
-# Local Variables:
-# mode: autoconf
-# End:
diff --git a/scripts/training/compact-rule-table/m4/libtool.m4 b/scripts/training/compact-rule-table/m4/libtool.m4
deleted file mode 100644
index b1251be60..000000000
--- a/scripts/training/compact-rule-table/m4/libtool.m4
+++ /dev/null
@@ -1,7356 +0,0 @@
-# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
-#
-# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
-# 2006, 2007, 2008 Free Software Foundation, Inc.
-# Written by Gordon Matzigkeit, 1996
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-
-m4_define([_LT_COPYING], [dnl
-# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
-# 2006, 2007, 2008 Free Software Foundation, Inc.
-# Written by Gordon Matzigkeit, 1996
-#
-# This file is part of GNU Libtool.
-#
-# GNU Libtool is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as
-# published by the Free Software Foundation; either version 2 of
-# the License, or (at your option) any later version.
-#
-# As a special exception to the GNU General Public License,
-# if you distribute this file as part of a program or library that
-# is built using GNU Libtool, you may include this file under the
-# same distribution terms that you use for the rest of that program.
-#
-# GNU Libtool is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Libtool; see the file COPYING. If not, a copy
-# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
-# obtained by writing to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-])
-
-# serial 56 LT_INIT
-
-
-# LT_PREREQ(VERSION)
-# ------------------
-# Complain and exit if this libtool version is less than VERSION.
-m4_defun([LT_PREREQ],
-[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
- [m4_default([$3],
- [m4_fatal([Libtool version $1 or higher is required],
- 63)])],
- [$2])])
-
-
-# _LT_CHECK_BUILDDIR
-# ------------------
-# Complain if the absolute build directory name contains unusual characters
-m4_defun([_LT_CHECK_BUILDDIR],
-[case `pwd` in
- *\ * | *\ *)
- AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
-esac
-])
-
-
-# LT_INIT([OPTIONS])
-# ------------------
-AC_DEFUN([LT_INIT],
-[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT
-AC_BEFORE([$0], [LT_LANG])dnl
-AC_BEFORE([$0], [LT_OUTPUT])dnl
-AC_BEFORE([$0], [LTDL_INIT])dnl
-m4_require([_LT_CHECK_BUILDDIR])dnl
-
-dnl Autoconf doesn't catch unexpanded LT_ macros by default:
-m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
-m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
-dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
-dnl unless we require an AC_DEFUNed macro:
-AC_REQUIRE([LTOPTIONS_VERSION])dnl
-AC_REQUIRE([LTSUGAR_VERSION])dnl
-AC_REQUIRE([LTVERSION_VERSION])dnl
-AC_REQUIRE([LTOBSOLETE_VERSION])dnl
-m4_require([_LT_PROG_LTMAIN])dnl
-
-dnl Parse OPTIONS
-_LT_SET_OPTIONS([$0], [$1])
-
-# This can be used to rebuild libtool when needed
-LIBTOOL_DEPS="$ltmain"
-
-# Always use our own libtool.
-LIBTOOL='$(SHELL) $(top_builddir)/libtool'
-AC_SUBST(LIBTOOL)dnl
-
-_LT_SETUP
-
-# Only expand once:
-m4_define([LT_INIT])
-])# LT_INIT
-
-# Old names:
-AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
-AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
-dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
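For orientation, a minimal configure.ac sketch that pulls this machinery in through LT_INIT; the version floor and the disable-static option are illustrative (older projects may still call the AC_PROG_LIBTOOL alias above):

    LT_PREREQ([2.2])
    LT_INIT([disable-static])
    # LT_INIT sets LIBTOOL_DEPS and substitutes LIBTOOL, as the macro body above shows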
-
-
-# _LT_CC_BASENAME(CC)
-# -------------------
-# Calculate cc_basename. Skip known compiler wrappers and cross-prefix.
-m4_defun([_LT_CC_BASENAME],
-[for cc_temp in $1""; do
- case $cc_temp in
- compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
- distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
- \-*) ;;
- *) break;;
- esac
-done
-cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"`
-])
-
-
-# _LT_FILEUTILS_DEFAULTS
-# ----------------------
-# It is okay to use these file commands and assume they have been set
-# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'.
-m4_defun([_LT_FILEUTILS_DEFAULTS],
-[: ${CP="cp -f"}
-: ${MV="mv -f"}
-: ${RM="rm -f"}
-])# _LT_FILEUTILS_DEFAULTS
-
-
-# _LT_SETUP
-# ---------
-m4_defun([_LT_SETUP],
-[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_REQUIRE([AC_CANONICAL_BUILD])dnl
-_LT_DECL([], [host_alias], [0], [The host system])dnl
-_LT_DECL([], [host], [0])dnl
-_LT_DECL([], [host_os], [0])dnl
-dnl
-_LT_DECL([], [build_alias], [0], [The build system])dnl
-_LT_DECL([], [build], [0])dnl
-_LT_DECL([], [build_os], [0])dnl
-dnl
-AC_REQUIRE([AC_PROG_CC])dnl
-AC_REQUIRE([LT_PATH_LD])dnl
-AC_REQUIRE([LT_PATH_NM])dnl
-dnl
-AC_REQUIRE([AC_PROG_LN_S])dnl
-test -z "$LN_S" && LN_S="ln -s"
-_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
-dnl
-AC_REQUIRE([LT_CMD_MAX_LEN])dnl
-_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
-_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
-dnl
-m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_CHECK_SHELL_FEATURES])dnl
-m4_require([_LT_CMD_RELOAD])dnl
-m4_require([_LT_CHECK_MAGIC_METHOD])dnl
-m4_require([_LT_CMD_OLD_ARCHIVE])dnl
-m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
-
-_LT_CONFIG_LIBTOOL_INIT([
-# See if we are running on zsh, and set the options which allow our
-# commands through without removal of \ escapes INIT.
-if test -n "\${ZSH_VERSION+set}" ; then
- setopt NO_GLOB_SUBST
-fi
-])
-if test -n "${ZSH_VERSION+set}" ; then
- setopt NO_GLOB_SUBST
-fi
-
-_LT_CHECK_OBJDIR
-
-m4_require([_LT_TAG_COMPILER])dnl
-_LT_PROG_ECHO_BACKSLASH
-
-case $host_os in
-aix3*)
- # AIX sometimes has problems with the GCC collect2 program. For some
- # reason, if we set the COLLECT_NAMES environment variable, the problems
- # vanish in a puff of smoke.
- if test "X${COLLECT_NAMES+set}" != Xset; then
- COLLECT_NAMES=
- export COLLECT_NAMES
- fi
- ;;
-esac
-
-# Sed substitution that helps us do robust quoting. It backslashifies
-# metacharacters that are still active within double-quoted strings.
-sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
-
-# Same as above, but do not quote variable references.
-double_quote_subst='s/\([["`\\]]\)/\\\1/g'
-
-# Sed substitution to delay expansion of an escaped shell variable in a
-# double_quote_subst'ed string.
-delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
-
-# Sed substitution to delay expansion of an escaped single quote.
-delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
-
-# Sed substitution to avoid accidental globbing in evaled expressions
-no_glob_subst='s/\*/\\\*/g'
-
-# Global variables:
-ofile=libtool
-can_build_shared=yes
-
-# All known linkers require a `.a' archive for static linking (except MSVC,
-# which needs '.lib').
-libext=a
-
-with_gnu_ld="$lt_cv_prog_gnu_ld"
-
-old_CC="$CC"
-old_CFLAGS="$CFLAGS"
-
-# Set sane defaults for various variables
-test -z "$CC" && CC=cc
-test -z "$LTCC" && LTCC=$CC
-test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
-test -z "$LD" && LD=ld
-test -z "$ac_objext" && ac_objext=o
-
-_LT_CC_BASENAME([$compiler])
-
-# Only perform the check for file, if the check method requires it
-test -z "$MAGIC_CMD" && MAGIC_CMD=file
-case $deplibs_check_method in
-file_magic*)
- if test "$file_magic_cmd" = '$MAGIC_CMD'; then
- _LT_PATH_MAGIC
- fi
- ;;
-esac
-
-# Use C for the default configuration in the libtool script
-LT_SUPPORTED_TAG([CC])
-_LT_LANG_C_CONFIG
-_LT_LANG_DEFAULT_CONFIG
-_LT_CONFIG_COMMANDS
-])# _LT_SETUP
-
-
-# _LT_PROG_LTMAIN
-# ---------------
-# Note that this code is called both from `configure', and `config.status'
-# now that we use AC_CONFIG_COMMANDS to generate libtool. Notably,
-# `config.status' has no value for ac_aux_dir unless we are using Automake,
-# so we pass a copy along to make sure it has a sensible value anyway.
-m4_defun([_LT_PROG_LTMAIN],
-[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
-_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
-ltmain="$ac_aux_dir/ltmain.sh"
-])# _LT_PROG_LTMAIN
-
-
-## ------------------------------------- ##
-## Accumulate code for creating libtool. ##
-## ------------------------------------- ##
-
-# So that we can recreate a full libtool script including additional
-# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
-# in macros and then make a single call at the end using the `libtool'
-# label.
-
-
-# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
-# ----------------------------------------
-# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
-m4_define([_LT_CONFIG_LIBTOOL_INIT],
-[m4_ifval([$1],
- [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
- [$1
-])])])
-
-# Initialize.
-m4_define([_LT_OUTPUT_LIBTOOL_INIT])
-
-
-# _LT_CONFIG_LIBTOOL([COMMANDS])
-# ------------------------------
-# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
-m4_define([_LT_CONFIG_LIBTOOL],
-[m4_ifval([$1],
- [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
- [$1
-])])])
-
-# Initialize.
-m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
-
-
-# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
-# -----------------------------------------------------
-m4_defun([_LT_CONFIG_SAVE_COMMANDS],
-[_LT_CONFIG_LIBTOOL([$1])
-_LT_CONFIG_LIBTOOL_INIT([$2])
-])
-
-
-# _LT_FORMAT_COMMENT([COMMENT])
-# -----------------------------
-# Add leading comment marks to the start of each line, and a trailing
-# full-stop to the whole comment if one is not present already.
-m4_define([_LT_FORMAT_COMMENT],
-[m4_ifval([$1], [
-m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
- [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
-)])
-
-
-
-## ------------------------ ##
-## FIXME: Eliminate VARNAME ##
-## ------------------------ ##
-
-
-# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
-# -------------------------------------------------------------------
-# CONFIGNAME is the name given to the value in the libtool script.
-# VARNAME is the (base) name used in the configure script.
-# VALUE may be 0, 1 or 2 for a computed quote escaped value based on
-# VARNAME. Any other value will be used directly.
-m4_define([_LT_DECL],
-[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
- [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
- [m4_ifval([$1], [$1], [$2])])
- lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
- m4_ifval([$4],
- [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
- lt_dict_add_subkey([lt_decl_dict], [$2],
- [tagged?], [m4_ifval([$5], [yes], [no])])])
-])
-
-
-# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
-# --------------------------------------------------------
-m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
-
-
-# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
-# ------------------------------------------------
-m4_define([lt_decl_tag_varnames],
-[_lt_decl_filter([tagged?], [yes], $@)])
-
-
-# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
-# ---------------------------------------------------------
-m4_define([_lt_decl_filter],
-[m4_case([$#],
- [0], [m4_fatal([$0: too few arguments: $#])],
- [1], [m4_fatal([$0: too few arguments: $#: $1])],
- [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
- [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
- [lt_dict_filter([lt_decl_dict], $@)])[]dnl
-])
-
-
-# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
-# --------------------------------------------------
-m4_define([lt_decl_quote_varnames],
-[_lt_decl_filter([value], [1], $@)])
-
-
-# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
-# ---------------------------------------------------
-m4_define([lt_decl_dquote_varnames],
-[_lt_decl_filter([value], [2], $@)])
-
-
-# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
-# ---------------------------------------------------
-m4_define([lt_decl_varnames_tagged],
-[m4_assert([$# <= 2])dnl
-_$0(m4_quote(m4_default([$1], [[, ]])),
- m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
- m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
-m4_define([_lt_decl_varnames_tagged],
-[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
-
-
-# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
-# ------------------------------------------------
-m4_define([lt_decl_all_varnames],
-[_$0(m4_quote(m4_default([$1], [[, ]])),
- m4_if([$2], [],
- m4_quote(lt_decl_varnames),
- m4_quote(m4_shift($@))))[]dnl
-])
-m4_define([_lt_decl_all_varnames],
-[lt_join($@, lt_decl_varnames_tagged([$1],
- lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
-])
-
-
-# _LT_CONFIG_STATUS_DECLARE([VARNAME])
-# ------------------------------------
-# Quote a variable value, and forward it to `config.status' so that its
-# declaration there will have the same value as in `configure'. VARNAME
-# must have a single quote delimited value for this to work.
-m4_define([_LT_CONFIG_STATUS_DECLARE],
-[$1='`$ECHO "X$][$1" | $Xsed -e "$delay_single_quote_subst"`'])
-
-
-# _LT_CONFIG_STATUS_DECLARATIONS
-# ------------------------------
-# We delimit libtool config variables with single quotes, so when
-# we write them to config.status, we have to be sure to quote all
-# embedded single quotes properly. In configure, this macro expands
-# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
-#
-# <var>='`$ECHO "X$<var>" | $Xsed -e "$delay_single_quote_subst"`'
-m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
-[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
- [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
-
-
-# _LT_LIBTOOL_TAGS
-# ----------------
-# Output comment and list of tags supported by the script
-m4_defun([_LT_LIBTOOL_TAGS],
-[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
-available_tags="_LT_TAGS"dnl
-])
-
-
-# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
-# -----------------------------------
-# Extract the dictionary values for VARNAME (optionally with TAG) and
-# expand to a commented shell variable setting:
-#
-# # Some comment about what VAR is for.
-# visible_name=$lt_internal_name
-m4_define([_LT_LIBTOOL_DECLARE],
-[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
- [description])))[]dnl
-m4_pushdef([_libtool_name],
- m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
-m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
- [0], [_libtool_name=[$]$1],
- [1], [_libtool_name=$lt_[]$1],
- [2], [_libtool_name=$lt_[]$1],
- [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
-m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
-])
-
-
-# _LT_LIBTOOL_CONFIG_VARS
-# -----------------------
-# Produce commented declarations of non-tagged libtool config variables
-# suitable for insertion in the LIBTOOL CONFIG section of the `libtool'
-# script. Tagged libtool config variables (even for the LIBTOOL CONFIG
-# section) are produced by _LT_LIBTOOL_TAG_VARS.
-m4_defun([_LT_LIBTOOL_CONFIG_VARS],
-[m4_foreach([_lt_var],
- m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
- [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
-
-
-# _LT_LIBTOOL_TAG_VARS(TAG)
-# -------------------------
-m4_define([_LT_LIBTOOL_TAG_VARS],
-[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
- [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
-
-
-# _LT_TAGVAR(VARNAME, [TAGNAME])
-# ------------------------------
-m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
-
-
-# _LT_CONFIG_COMMANDS
-# -------------------
-# Send accumulated output to $CONFIG_STATUS. Thanks to the lists of
-# variables for single and double quote escaping we saved from calls
-# to _LT_DECL, we can put quote-escaped variable declarations
-# into `config.status', and then the shell code to quote escape them in
-# for loops in `config.status'. Finally, any additional code accumulated
-# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
-m4_defun([_LT_CONFIG_COMMANDS],
-[AC_PROVIDE_IFELSE([LT_OUTPUT],
- dnl If the libtool generation code has been placed in $CONFIG_LT,
- dnl instead of duplicating it all over again into config.status,
- dnl then we will have config.status run $CONFIG_LT later, so it
- dnl needs to know what name is stored there:
- [AC_CONFIG_COMMANDS([libtool],
- [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
- dnl If the libtool generation code is destined for config.status,
- dnl expand the accumulated commands and init code now:
- [AC_CONFIG_COMMANDS([libtool],
- [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
-])#_LT_CONFIG_COMMANDS
-
-
-# Initialize.
-m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
-[
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-sed_quote_subst='$sed_quote_subst'
-double_quote_subst='$double_quote_subst'
-delay_variable_subst='$delay_variable_subst'
-_LT_CONFIG_STATUS_DECLARATIONS
-LTCC='$LTCC'
-LTCFLAGS='$LTCFLAGS'
-compiler='$compiler_DEFAULT'
-
-# Quote evaled strings.
-for var in lt_decl_all_varnames([[ \
-]], lt_decl_quote_varnames); do
- case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in
- *[[\\\\\\\`\\"\\\$]]*)
- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
- ;;
- *)
- eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
- ;;
- esac
-done
-
-# Double-quote double-evaled strings.
-for var in lt_decl_all_varnames([[ \
-]], lt_decl_dquote_varnames); do
- case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in
- *[[\\\\\\\`\\"\\\$]]*)
- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
- ;;
- *)
- eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
- ;;
- esac
-done
-
-# Fix-up fallback echo if it was mangled by the above quoting rules.
-case \$lt_ECHO in
-*'\\\[$]0 --fallback-echo"')dnl "
- lt_ECHO=\`\$ECHO "X\$lt_ECHO" | \$Xsed -e 's/\\\\\\\\\\\\\\\[$]0 --fallback-echo"\[$]/\[$]0 --fallback-echo"/'\`
- ;;
-esac
-
-_LT_OUTPUT_LIBTOOL_INIT
-])
-
-
-# LT_OUTPUT
-# ---------
-# This macro allows early generation of the libtool script (before
-# AC_OUTPUT is called), in case it is used in configure for compilation
-# tests.
-AC_DEFUN([LT_OUTPUT],
-[: ${CONFIG_LT=./config.lt}
-AC_MSG_NOTICE([creating $CONFIG_LT])
-cat >"$CONFIG_LT" <<_LTEOF
-#! $SHELL
-# Generated by $as_me.
-# Run this file to recreate a libtool stub with the current configuration.
-
-lt_cl_silent=false
-SHELL=\${CONFIG_SHELL-$SHELL}
-_LTEOF
-
-cat >>"$CONFIG_LT" <<\_LTEOF
-AS_SHELL_SANITIZE
-_AS_PREPARE
-
-exec AS_MESSAGE_FD>&1
-exec AS_MESSAGE_LOG_FD>>config.log
-{
- echo
- AS_BOX([Running $as_me.])
-} >&AS_MESSAGE_LOG_FD
-
-lt_cl_help="\
-\`$as_me' creates a local libtool stub from the current configuration,
-for use in further configure time tests before the real libtool is
-generated.
-
-Usage: $[0] [[OPTIONS]]
-
- -h, --help print this help, then exit
- -V, --version print version number, then exit
- -q, --quiet do not print progress messages
- -d, --debug don't remove temporary files
-
-Report bugs to <bug-libtool@gnu.org>."
-
-lt_cl_version="\
-m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
-m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
-configured by $[0], generated by m4_PACKAGE_STRING.
-
-Copyright (C) 2008 Free Software Foundation, Inc.
-This config.lt script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it."
-
-while test $[#] != 0
-do
- case $[1] in
- --version | --v* | -V )
- echo "$lt_cl_version"; exit 0 ;;
- --help | --h* | -h )
- echo "$lt_cl_help"; exit 0 ;;
- --debug | --d* | -d )
- debug=: ;;
- --quiet | --q* | --silent | --s* | -q )
- lt_cl_silent=: ;;
-
- -*) AC_MSG_ERROR([unrecognized option: $[1]
-Try \`$[0] --help' for more information.]) ;;
-
- *) AC_MSG_ERROR([unrecognized argument: $[1]
-Try \`$[0] --help' for more information.]) ;;
- esac
- shift
-done
-
-if $lt_cl_silent; then
- exec AS_MESSAGE_FD>/dev/null
-fi
-_LTEOF
-
-cat >>"$CONFIG_LT" <<_LTEOF
-_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
-_LTEOF
-
-cat >>"$CONFIG_LT" <<\_LTEOF
-AC_MSG_NOTICE([creating $ofile])
-_LT_OUTPUT_LIBTOOL_COMMANDS
-AS_EXIT(0)
-_LTEOF
-chmod +x "$CONFIG_LT"
-
-# configure is writing to config.log, but config.lt does its own redirection,
-# appending to config.log, which fails on DOS, as config.log is still kept
-# open by configure. Here we exec the FD to /dev/null, effectively closing
-# config.log, so it can be properly (re)opened and appended to by config.lt.
-if test "$no_create" != yes; then
- lt_cl_success=:
- test "$silent" = yes &&
- lt_config_lt_args="$lt_config_lt_args --quiet"
- exec AS_MESSAGE_LOG_FD>/dev/null
- $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
- exec AS_MESSAGE_LOG_FD>>config.log
- $lt_cl_success || AS_EXIT(1)
-fi
-])# LT_OUTPUT
-
-
-# _LT_CONFIG(TAG)
-# ---------------
-# If TAG is the built-in tag, create an initial libtool script with a
-# default configuration from the untagged config vars. Otherwise add code
-# to config.status for appending the configuration named by TAG from the
-# matching tagged config vars.
-m4_defun([_LT_CONFIG],
-[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-_LT_CONFIG_SAVE_COMMANDS([
- m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
- m4_if(_LT_TAG, [C], [
- # See if we are running on zsh, and set the options which allow our
- # commands through without removal of \ escapes.
- if test -n "${ZSH_VERSION+set}" ; then
- setopt NO_GLOB_SUBST
- fi
-
- cfgfile="${ofile}T"
- trap "$RM \"$cfgfile\"; exit 1" 1 2 15
- $RM "$cfgfile"
-
- cat <<_LT_EOF >> "$cfgfile"
-#! $SHELL
-
-# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
-# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
-# NOTE: Changes made to this file will be lost: look at ltmain.sh.
-#
-_LT_COPYING
-_LT_LIBTOOL_TAGS
-
-# ### BEGIN LIBTOOL CONFIG
-_LT_LIBTOOL_CONFIG_VARS
-_LT_LIBTOOL_TAG_VARS
-# ### END LIBTOOL CONFIG
-
-_LT_EOF
-
- case $host_os in
- aix3*)
- cat <<\_LT_EOF >> "$cfgfile"
-# AIX sometimes has problems with the GCC collect2 program. For some
-# reason, if we set the COLLECT_NAMES environment variable, the problems
-# vanish in a puff of smoke.
-if test "X${COLLECT_NAMES+set}" != Xset; then
- COLLECT_NAMES=
- export COLLECT_NAMES
-fi
-_LT_EOF
- ;;
- esac
-
- _LT_PROG_LTMAIN
-
- # We use sed instead of cat because bash on DJGPP gets confused if
-  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
- || (rm -f "$cfgfile"; exit 1)
-
- _LT_PROG_XSI_SHELLFNS
-
- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
- || (rm -f "$cfgfile"; exit 1)
-
- mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-],
-[cat <<_LT_EOF >> "$ofile"
-
-dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
-dnl in a comment (ie after a #).
-# ### BEGIN LIBTOOL TAG CONFIG: $1
-_LT_LIBTOOL_TAG_VARS(_LT_TAG)
-# ### END LIBTOOL TAG CONFIG: $1
-_LT_EOF
-])dnl /m4_if
-],
-[m4_if([$1], [], [
- PACKAGE='$PACKAGE'
- VERSION='$VERSION'
- TIMESTAMP='$TIMESTAMP'
- RM='$RM'
- ofile='$ofile'], [])
-])dnl /_LT_CONFIG_SAVE_COMMANDS
-])# _LT_CONFIG
-
-
-# LT_SUPPORTED_TAG(TAG)
-# ---------------------
-# Trace this macro to discover what tags are supported by the libtool
-# --tag option, using:
-# autoconf --trace 'LT_SUPPORTED_TAG:$1'
-AC_DEFUN([LT_SUPPORTED_TAG], [])
-
-
-# C support is built-in for now
-m4_define([_LT_LANG_C_enabled], [])
-m4_define([_LT_TAGS], [])
-
-
-# LT_LANG(LANG)
-# -------------
-# Enable libtool support for the given language if not already enabled.
-AC_DEFUN([LT_LANG],
-[AC_BEFORE([$0], [LT_OUTPUT])dnl
-m4_case([$1],
- [C], [_LT_LANG(C)],
- [C++], [_LT_LANG(CXX)],
- [Java], [_LT_LANG(GCJ)],
- [Fortran 77], [_LT_LANG(F77)],
- [Fortran], [_LT_LANG(FC)],
- [Windows Resource], [_LT_LANG(RC)],
- [m4_ifdef([_LT_LANG_]$1[_CONFIG],
- [_LT_LANG($1)],
- [m4_fatal([$0: unsupported language: "$1"])])])dnl
-])# LT_LANG
-
-
-# _LT_LANG(LANGNAME)
-# ------------------
-m4_defun([_LT_LANG],
-[m4_ifdef([_LT_LANG_]$1[_enabled], [],
- [LT_SUPPORTED_TAG([$1])dnl
- m4_append([_LT_TAGS], [$1 ])dnl
- m4_define([_LT_LANG_]$1[_enabled], [])dnl
- _LT_LANG_$1_CONFIG($1)])dnl
-])# _LT_LANG
-
-
-# _LT_LANG_DEFAULT_CONFIG
-# -----------------------
-m4_defun([_LT_LANG_DEFAULT_CONFIG],
-[AC_PROVIDE_IFELSE([AC_PROG_CXX],
- [LT_LANG(CXX)],
- [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
-
-AC_PROVIDE_IFELSE([AC_PROG_F77],
- [LT_LANG(F77)],
- [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
-
-AC_PROVIDE_IFELSE([AC_PROG_FC],
- [LT_LANG(FC)],
- [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
-
-dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
-dnl pulling things in needlessly.
-AC_PROVIDE_IFELSE([AC_PROG_GCJ],
- [LT_LANG(GCJ)],
- [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
- [LT_LANG(GCJ)],
- [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
- [LT_LANG(GCJ)],
- [m4_ifdef([AC_PROG_GCJ],
- [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
- m4_ifdef([A][M_PROG_GCJ],
- [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
- m4_ifdef([LT_PROG_GCJ],
- [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
-
-AC_PROVIDE_IFELSE([LT_PROG_RC],
- [LT_LANG(RC)],
- [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
-])# _LT_LANG_DEFAULT_CONFIG
-
-# Obsolete macros:
-AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
-AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
-AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
-AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
-dnl AC_DEFUN([AC_LIBTOOL_F77], [])
-dnl AC_DEFUN([AC_LIBTOOL_FC], [])
-dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
-
-
-# _LT_TAG_COMPILER
-# ----------------
-m4_defun([_LT_TAG_COMPILER],
-[AC_REQUIRE([AC_PROG_CC])dnl
-
-_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
-_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
-_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
-_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-])# _LT_TAG_COMPILER
-
-
-# _LT_COMPILER_BOILERPLATE
-# ------------------------
-# Check for compiler boilerplate output or warnings with
-# the simple compiler test code.
-m4_defun([_LT_COMPILER_BOILERPLATE],
-[m4_require([_LT_DECL_SED])dnl
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-])# _LT_COMPILER_BOILERPLATE
-
-
-# _LT_LINKER_BOILERPLATE
-# ----------------------
-# Check for linker boilerplate output or warnings with
-# the simple link test code.
-m4_defun([_LT_LINKER_BOILERPLATE],
-[m4_require([_LT_DECL_SED])dnl
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-])# _LT_LINKER_BOILERPLATE
-
-# _LT_REQUIRED_DARWIN_CHECKS
-# --------------------------
-m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
- case $host_os in
- rhapsody* | darwin*)
- AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
- AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
- AC_CHECK_TOOL([LIPO], [lipo], [:])
- AC_CHECK_TOOL([OTOOL], [otool], [:])
- AC_CHECK_TOOL([OTOOL64], [otool64], [:])
- _LT_DECL([], [DSYMUTIL], [1],
- [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
- _LT_DECL([], [NMEDIT], [1],
- [Tool to change global to local symbols on Mac OS X])
- _LT_DECL([], [LIPO], [1],
- [Tool to manipulate fat objects and archives on Mac OS X])
- _LT_DECL([], [OTOOL], [1],
- [ldd/readelf like tool for Mach-O binaries on Mac OS X])
- _LT_DECL([], [OTOOL64], [1],
- [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
-
- AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
- [lt_cv_apple_cc_single_mod=no
- if test -z "${LT_MULTI_MODULE}"; then
- # By default we will add the -single_module flag. You can override
- # by either setting the environment variable LT_MULTI_MODULE
- # non-empty at configure time, or by adding -multi_module to the
- # link flags.
- rm -rf libconftest.dylib*
- echo "int foo(void){return 1;}" > conftest.c
- echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
--dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
- $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
- -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
- _lt_result=$?
- if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then
- lt_cv_apple_cc_single_mod=yes
- else
- cat conftest.err >&AS_MESSAGE_LOG_FD
- fi
- rm -rf libconftest.dylib*
- rm -f conftest.*
- fi])
- AC_CACHE_CHECK([for -exported_symbols_list linker flag],
- [lt_cv_ld_exported_symbols_list],
- [lt_cv_ld_exported_symbols_list=no
- save_LDFLAGS=$LDFLAGS
- echo "_main" > conftest.sym
- LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
- AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
- [lt_cv_ld_exported_symbols_list=yes],
- [lt_cv_ld_exported_symbols_list=no])
- LDFLAGS="$save_LDFLAGS"
- ])
- case $host_os in
- rhapsody* | darwin1.[[012]])
- _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
- darwin1.*)
- _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
- darwin*) # darwin 5.x on
-      # If running on 10.5 or later, the deployment target defaults to
-      # the OS version when on x86; on 10.4, the deployment target
-      # defaults to 10.4.  Don't you love it?
- case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
- 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
- _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
- 10.[[012]]*)
- _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
- 10.*)
- _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
- esac
- ;;
- esac
- if test "$lt_cv_apple_cc_single_mod" = "yes"; then
- _lt_dar_single_mod='$single_module'
- fi
- if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
- _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
- else
- _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
- fi
- if test "$DSYMUTIL" != ":"; then
- _lt_dsymutil='~$DSYMUTIL $lib || :'
- else
- _lt_dsymutil=
- fi
- ;;
- esac
-])
-
-
-# _LT_DARWIN_LINKER_FEATURES
-# --------------------------
-# Checks for linker and compiler features on darwin
-m4_defun([_LT_DARWIN_LINKER_FEATURES],
-[
- m4_require([_LT_REQUIRED_DARWIN_CHECKS])
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- _LT_TAGVAR(hardcode_direct, $1)=no
- _LT_TAGVAR(hardcode_automatic, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
- _LT_TAGVAR(whole_archive_flag_spec, $1)=''
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined"
- case $cc_basename in
- ifort*) _lt_dar_can_shared=yes ;;
- *) _lt_dar_can_shared=$GCC ;;
- esac
- if test "$_lt_dar_can_shared" = "yes"; then
- output_verbose_link_cmd=echo
- _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
- _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
- _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
- _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
- m4_if([$1], [CXX],
-[ if test "$lt_cv_apple_cc_single_mod" != "yes"; then
- _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
- _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
- fi
-],[])
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
-])
-
-# _LT_SYS_MODULE_PATH_AIX
-# -----------------------
-# Links a minimal program and checks the executable
-# for the system default hardcoded library path. In most cases,
-# this is /usr/lib:/lib, but when the MPI compilers are used
-# the location of the communication and MPI libs are included too.
-# If we don't find anything, use the default library path according
-# to the aix ld manual.
-m4_defun([_LT_SYS_MODULE_PATH_AIX],
-[m4_require([_LT_DECL_SED])dnl
-AC_LINK_IFELSE(AC_LANG_PROGRAM,[
-lt_aix_libpath_sed='
- /Import File Strings/,/^$/ {
- /^0/ {
- s/^0 *\(.*\)$/\1/
- p
- }
- }'
-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-# Check for a 64-bit object if we didn't find anything.
-if test -z "$aix_libpath"; then
- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-fi],[])
-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-])# _LT_SYS_MODULE_PATH_AIX
-
-
-# _LT_SHELL_INIT(ARG)
-# -------------------
-m4_define([_LT_SHELL_INIT],
-[ifdef([AC_DIVERSION_NOTICE],
- [AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)],
- [AC_DIVERT_PUSH(NOTICE)])
-$1
-AC_DIVERT_POP
-])# _LT_SHELL_INIT
-
-
-# _LT_PROG_ECHO_BACKSLASH
-# -----------------------
-# Add some code to the start of the generated configure script which
-# will find an echo command which doesn't interpret backslashes.
-m4_defun([_LT_PROG_ECHO_BACKSLASH],
-[_LT_SHELL_INIT([
-# Check that we are running under the correct shell.
-SHELL=${CONFIG_SHELL-/bin/sh}
-
-case X$lt_ECHO in
-X*--fallback-echo)
- # Remove one level of quotation (which was required for Make).
- ECHO=`echo "$lt_ECHO" | sed 's,\\\\\[$]\\[$]0,'[$]0','`
- ;;
-esac
-
-ECHO=${lt_ECHO-echo}
-if test "X[$]1" = X--no-reexec; then
- # Discard the --no-reexec flag, and continue.
- shift
-elif test "X[$]1" = X--fallback-echo; then
- # Avoid inline document here, it may be left over
- :
-elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' ; then
- # Yippee, $ECHO works!
- :
-else
- # Restart under the correct shell.
- exec $SHELL "[$]0" --no-reexec ${1+"[$]@"}
-fi
-
-if test "X[$]1" = X--fallback-echo; then
- # used as fallback echo
- shift
- cat <<_LT_EOF
-[$]*
-_LT_EOF
- exit 0
-fi
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-if test -z "$lt_ECHO"; then
- if test "X${echo_test_string+set}" != Xset; then
- # find a string as large as possible, as long as the shell can cope with it
- for cmd in 'sed 50q "[$]0"' 'sed 20q "[$]0"' 'sed 10q "[$]0"' 'sed 2q "[$]0"' 'echo test'; do
- # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ...
- if { echo_test_string=`eval $cmd`; } 2>/dev/null &&
- { test "X$echo_test_string" = "X$echo_test_string"; } 2>/dev/null
- then
- break
- fi
- done
- fi
-
- if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- :
- else
- # The Solaris, AIX, and Digital Unix default echo programs unquote
- # backslashes. This makes it impossible to quote backslashes using
- # echo "$something" | sed 's/\\/\\\\/g'
- #
- # So, first we look for a working echo in the user's PATH.
-
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for dir in $PATH /usr/ucb; do
- IFS="$lt_save_ifs"
- if (test -f $dir/echo || test -f $dir/echo$ac_exeext) &&
- test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- ECHO="$dir/echo"
- break
- fi
- done
- IFS="$lt_save_ifs"
-
- if test "X$ECHO" = Xecho; then
- # We didn't find a better echo, so look for alternatives.
- if test "X`{ print -r '\t'; } 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`{ print -r "$echo_test_string"; } 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- # This shell has a builtin print -r that does the trick.
- ECHO='print -r'
- elif { test -f /bin/ksh || test -f /bin/ksh$ac_exeext; } &&
- test "X$CONFIG_SHELL" != X/bin/ksh; then
- # If we have ksh, try running configure again with it.
- ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh}
- export ORIGINAL_CONFIG_SHELL
- CONFIG_SHELL=/bin/ksh
- export CONFIG_SHELL
- exec $CONFIG_SHELL "[$]0" --no-reexec ${1+"[$]@"}
- else
- # Try using printf.
- ECHO='printf %s\n'
- if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' &&
- echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- # Cool, printf works
- :
- elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` &&
- test "X$echo_testing_string" = 'X\t' &&
- echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL
- export CONFIG_SHELL
- SHELL="$CONFIG_SHELL"
- export SHELL
- ECHO="$CONFIG_SHELL [$]0 --fallback-echo"
- elif echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` &&
- test "X$echo_testing_string" = 'X\t' &&
- echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` &&
- test "X$echo_testing_string" = "X$echo_test_string"; then
- ECHO="$CONFIG_SHELL [$]0 --fallback-echo"
- else
- # maybe with a smaller string...
- prev=:
-
- for cmd in 'echo test' 'sed 2q "[$]0"' 'sed 10q "[$]0"' 'sed 20q "[$]0"' 'sed 50q "[$]0"'; do
- if { test "X$echo_test_string" = "X`eval $cmd`"; } 2>/dev/null
- then
- break
- fi
- prev="$cmd"
- done
-
- if test "$prev" != 'sed 50q "[$]0"'; then
- echo_test_string=`eval $prev`
- export echo_test_string
- exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "[$]0" ${1+"[$]@"}
- else
- # Oops. We lost completely, so just stick with echo.
- ECHO=echo
- fi
- fi
- fi
- fi
- fi
-fi
-
-# Copy echo and quote the copy suitably for passing to libtool from
-# the Makefile, instead of quoting the original, which is used later.
-lt_ECHO=$ECHO
-if test "X$lt_ECHO" = "X$CONFIG_SHELL [$]0 --fallback-echo"; then
- lt_ECHO="$CONFIG_SHELL \\\$\[$]0 --fallback-echo"
-fi
-
-AC_SUBST(lt_ECHO)
-])
-_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
-_LT_DECL([], [ECHO], [1],
- [An echo program that does not interpret backslashes])
-])# _LT_PROG_ECHO_BACKSLASH
-
-
-# _LT_ENABLE_LOCK
-# ---------------
-m4_defun([_LT_ENABLE_LOCK],
-[AC_ARG_ENABLE([libtool-lock],
- [AS_HELP_STRING([--disable-libtool-lock],
- [avoid locking (might break parallel builds)])])
-test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
-
-# Some flags need to be propagated to the compiler or linker for good
-# libtool support.
-case $host in
-ia64-*-hpux*)
- # Find out which ABI we are using.
- echo 'int i;' > conftest.$ac_ext
- if AC_TRY_EVAL(ac_compile); then
- case `/usr/bin/file conftest.$ac_objext` in
- *ELF-32*)
- HPUX_IA64_MODE="32"
- ;;
- *ELF-64*)
- HPUX_IA64_MODE="64"
- ;;
- esac
- fi
- rm -rf conftest*
- ;;
-*-*-irix6*)
- # Find out which ABI we are using.
- echo '[#]line __oline__ "configure"' > conftest.$ac_ext
- if AC_TRY_EVAL(ac_compile); then
- if test "$lt_cv_prog_gnu_ld" = yes; then
- case `/usr/bin/file conftest.$ac_objext` in
- *32-bit*)
- LD="${LD-ld} -melf32bsmip"
- ;;
- *N32*)
- LD="${LD-ld} -melf32bmipn32"
- ;;
- *64-bit*)
- LD="${LD-ld} -melf64bmip"
- ;;
- esac
- else
- case `/usr/bin/file conftest.$ac_objext` in
- *32-bit*)
- LD="${LD-ld} -32"
- ;;
- *N32*)
- LD="${LD-ld} -n32"
- ;;
- *64-bit*)
- LD="${LD-ld} -64"
- ;;
- esac
- fi
- fi
- rm -rf conftest*
- ;;
-
-x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
-s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
- # Find out which ABI we are using.
- echo 'int i;' > conftest.$ac_ext
- if AC_TRY_EVAL(ac_compile); then
- case `/usr/bin/file conftest.o` in
- *32-bit*)
- case $host in
- x86_64-*kfreebsd*-gnu)
- LD="${LD-ld} -m elf_i386_fbsd"
- ;;
- x86_64-*linux*)
- LD="${LD-ld} -m elf_i386"
- ;;
- ppc64-*linux*|powerpc64-*linux*)
- LD="${LD-ld} -m elf32ppclinux"
- ;;
- s390x-*linux*)
- LD="${LD-ld} -m elf_s390"
- ;;
- sparc64-*linux*)
- LD="${LD-ld} -m elf32_sparc"
- ;;
- esac
- ;;
- *64-bit*)
- case $host in
- x86_64-*kfreebsd*-gnu)
- LD="${LD-ld} -m elf_x86_64_fbsd"
- ;;
- x86_64-*linux*)
- LD="${LD-ld} -m elf_x86_64"
- ;;
- ppc*-*linux*|powerpc*-*linux*)
- LD="${LD-ld} -m elf64ppc"
- ;;
- s390*-*linux*|s390*-*tpf*)
- LD="${LD-ld} -m elf64_s390"
- ;;
- sparc*-*linux*)
- LD="${LD-ld} -m elf64_sparc"
- ;;
- esac
- ;;
- esac
- fi
- rm -rf conftest*
- ;;
-
-*-*-sco3.2v5*)
- # On SCO OpenServer 5, we need -belf to get full-featured binaries.
- SAVE_CFLAGS="$CFLAGS"
- CFLAGS="$CFLAGS -belf"
- AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
- [AC_LANG_PUSH(C)
- AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
- AC_LANG_POP])
- if test x"$lt_cv_cc_needs_belf" != x"yes"; then
- # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
- CFLAGS="$SAVE_CFLAGS"
- fi
- ;;
-sparc*-*solaris*)
- # Find out which ABI we are using.
- echo 'int i;' > conftest.$ac_ext
- if AC_TRY_EVAL(ac_compile); then
- case `/usr/bin/file conftest.o` in
- *64-bit*)
- case $lt_cv_prog_gnu_ld in
- yes*) LD="${LD-ld} -m elf64_sparc" ;;
- *)
- if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
- LD="${LD-ld} -64"
- fi
- ;;
- esac
- ;;
- esac
- fi
- rm -rf conftest*
- ;;
-esac
-
-need_locks="$enable_libtool_lock"
-])# _LT_ENABLE_LOCK
-
-
-# _LT_CMD_OLD_ARCHIVE
-# -------------------
-m4_defun([_LT_CMD_OLD_ARCHIVE],
-[AC_CHECK_TOOL(AR, ar, false)
-test -z "$AR" && AR=ar
-test -z "$AR_FLAGS" && AR_FLAGS=cru
-_LT_DECL([], [AR], [1], [The archiver])
-_LT_DECL([], [AR_FLAGS], [1])
-
-AC_CHECK_TOOL(STRIP, strip, :)
-test -z "$STRIP" && STRIP=:
-_LT_DECL([], [STRIP], [1], [A symbol stripping program])
-
-AC_CHECK_TOOL(RANLIB, ranlib, :)
-test -z "$RANLIB" && RANLIB=:
-_LT_DECL([], [RANLIB], [1],
- [Commands used to install an old-style archive])
-
-# Determine commands to create old-style static archives.
-old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
-old_postinstall_cmds='chmod 644 $oldlib'
-old_postuninstall_cmds=
-
-if test -n "$RANLIB"; then
- case $host_os in
- openbsd*)
- old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib"
- ;;
- *)
- old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib"
- ;;
- esac
- old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib"
-fi
-_LT_DECL([], [old_postinstall_cmds], [2])
-_LT_DECL([], [old_postuninstall_cmds], [2])
-_LT_TAGDECL([], [old_archive_cmds], [2],
- [Commands used to build an old-style archive])
-])# _LT_CMD_OLD_ARCHIVE
-
-
-# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
-# [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
-# ----------------------------------------------------------------
-# Check whether the given compiler option works
-AC_DEFUN([_LT_COMPILER_OPTION],
-[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_DECL_SED])dnl
-AC_CACHE_CHECK([$1], [$2],
- [$2=no
- m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
- lt_compiler_flag="$3"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- # The option is referenced via a variable to avoid confusing sed.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
- (eval "$lt_compile" 2>conftest.err)
- ac_status=$?
- cat conftest.err >&AS_MESSAGE_LOG_FD
- echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
- if (exit $ac_status) && test -s "$ac_outfile"; then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings other than the usual output.
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
- $2=yes
- fi
- fi
- $RM conftest*
-])
-
-if test x"[$]$2" = xyes; then
- m4_if([$5], , :, [$5])
-else
- m4_if([$6], , :, [$6])
-fi
-])# _LT_COMPILER_OPTION
-
-# Old name:
-AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
-
-
-# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
-# [ACTION-SUCCESS], [ACTION-FAILURE])
-# ----------------------------------------------------
-# Check whether the given linker option works
-AC_DEFUN([_LT_LINKER_OPTION],
-[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_DECL_SED])dnl
-AC_CACHE_CHECK([$1], [$2],
- [$2=no
- save_LDFLAGS="$LDFLAGS"
- LDFLAGS="$LDFLAGS $3"
- echo "$lt_simple_link_test_code" > conftest.$ac_ext
- if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
- # The linker can only warn and ignore the option if not recognized
- # So say no if there are warnings
- if test -s conftest.err; then
- # Append any errors to the config.log.
- cat conftest.err 1>&AS_MESSAGE_LOG_FD
- $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp
- $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
- if diff conftest.exp conftest.er2 >/dev/null; then
- $2=yes
- fi
- else
- $2=yes
- fi
- fi
- $RM -r conftest*
- LDFLAGS="$save_LDFLAGS"
-])
-
-if test x"[$]$2" = xyes; then
- m4_if([$4], , :, [$4])
-else
- m4_if([$5], , :, [$5])
-fi
-])# _LT_LINKER_OPTION
-
-# Old name:
-AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
-
-
-# LT_CMD_MAX_LEN
-#---------------
-AC_DEFUN([LT_CMD_MAX_LEN],
-[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-# find the maximum length of command line arguments
-AC_MSG_CHECKING([the maximum length of command line arguments])
-AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
- i=0
- teststring="ABCD"
-
- case $build_os in
- msdosdjgpp*)
- # On DJGPP, this test can blow up pretty badly due to problems in libc
- # (any single argument exceeding 2000 bytes causes a buffer overrun
- # during glob expansion). Even if it were fixed, the result of this
- # check would be larger than it should be.
- lt_cv_sys_max_cmd_len=12288; # 12K is about right
- ;;
-
- gnu*)
- # Under GNU Hurd, this test is not required because there is
- # no limit to the length of command line arguments.
- # Libtool will interpret -1 as no limit whatsoever
- lt_cv_sys_max_cmd_len=-1;
- ;;
-
- cygwin* | mingw* | cegcc*)
- # On Win9x/ME, this test blows up -- it succeeds, but takes
- # about 5 minutes as the teststring grows exponentially.
- # Worse, since 9x/ME are not pre-emptively multitasking,
- # you end up with a "frozen" computer, even though with patience
- # the test eventually succeeds (with a max line length of 256k).
-    # Instead, let's just punt: use the minimum line length reported by
- # all of the supported platforms: 8192 (on NT/2K/XP).
- lt_cv_sys_max_cmd_len=8192;
- ;;
-
- amigaos*)
- # On AmigaOS with pdksh, this test takes hours, literally.
- # So we just punt and use a minimum line length of 8192.
- lt_cv_sys_max_cmd_len=8192;
- ;;
-
- netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
- # This has been around since 386BSD, at least. Likely further.
- if test -x /sbin/sysctl; then
- lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
- elif test -x /usr/sbin/sysctl; then
- lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
- else
- lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs
- fi
- # And add a safety zone
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
- ;;
-
- interix*)
- # We know the value 262144 and hardcode it with a safety zone (like BSD)
- lt_cv_sys_max_cmd_len=196608
- ;;
-
- osf*)
- # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
- # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
-    # nice to cause kernel panics, so let's avoid the loop below.
- # First set a reasonable default.
- lt_cv_sys_max_cmd_len=16384
- #
- if test -x /sbin/sysconfig; then
- case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
- *1*) lt_cv_sys_max_cmd_len=-1 ;;
- esac
- fi
- ;;
- sco3.2v5*)
- lt_cv_sys_max_cmd_len=102400
- ;;
- sysv5* | sco5v6* | sysv4.2uw2*)
- kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
- if test -n "$kargmax"; then
- lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'`
- else
- lt_cv_sys_max_cmd_len=32768
- fi
- ;;
- *)
- lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
- if test -n "$lt_cv_sys_max_cmd_len"; then
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
- else
- # Make teststring a little bigger before we do anything with it.
- # a 1K string should be a reasonable start.
- for i in 1 2 3 4 5 6 7 8 ; do
- teststring=$teststring$teststring
- done
- SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
- # If test is not a shell built-in, we'll probably end up computing a
- # maximum length that is only half of the actual maximum length, but
- # we can't tell.
- while { test "X"`$SHELL [$]0 --fallback-echo "X$teststring$teststring" 2>/dev/null` \
- = "XX$teststring$teststring"; } >/dev/null 2>&1 &&
- test $i != 17 # 1/2 MB should be enough
- do
- i=`expr $i + 1`
- teststring=$teststring$teststring
- done
- # Only check the string length outside the loop.
- lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
- teststring=
- # Add a significant safety factor because C++ compilers can tack on
- # massive amounts of additional arguments before passing them to the
- # linker. It appears as though 1/2 is a usable value.
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
- fi
- ;;
- esac
-])
-if test -n $lt_cv_sys_max_cmd_len ; then
- AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
-else
- AC_MSG_RESULT(none)
-fi
-max_cmd_len=$lt_cv_sys_max_cmd_len
-_LT_DECL([], [max_cmd_len], [0],
- [What is the maximum length of a command?])
-])# LT_CMD_MAX_LEN
-
-# Old name:
-AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
-
-
-# _LT_HEADER_DLFCN
-# ----------------
-m4_defun([_LT_HEADER_DLFCN],
-[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
-])# _LT_HEADER_DLFCN
-
-
-# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
-# ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
-# ----------------------------------------------------------------
-m4_defun([_LT_TRY_DLOPEN_SELF],
-[m4_require([_LT_HEADER_DLFCN])dnl
-if test "$cross_compiling" = yes; then :
- [$4]
-else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
-[#line __oline__ "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-# define LT_DLGLOBAL RTLD_GLOBAL
-#else
-# ifdef DL_GLOBAL
-# define LT_DLGLOBAL DL_GLOBAL
-# else
-# define LT_DLGLOBAL 0
-# endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
-   find out it does not work on some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-# ifdef RTLD_LAZY
-# define LT_DLLAZY_OR_NOW RTLD_LAZY
-# else
-# ifdef DL_LAZY
-# define LT_DLLAZY_OR_NOW DL_LAZY
-# else
-# ifdef RTLD_NOW
-# define LT_DLLAZY_OR_NOW RTLD_NOW
-# else
-# ifdef DL_NOW
-# define LT_DLLAZY_OR_NOW DL_NOW
-# else
-# define LT_DLLAZY_OR_NOW 0
-# endif
-# endif
-# endif
-# endif
-#endif
-
-void fnord() { int i=42;}
-int main ()
-{
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
- int status = $lt_dlunknown;
-
- if (self)
- {
- if (dlsym (self,"fnord")) status = $lt_dlno_uscore;
- else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore;
- /* dlclose (self); */
- }
- else
- puts (dlerror ());
-
- return status;
-}]
-_LT_EOF
- if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then
- (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
- lt_status=$?
- case x$lt_status in
- x$lt_dlno_uscore) $1 ;;
- x$lt_dlneed_uscore) $2 ;;
- x$lt_dlunknown|x*) $3 ;;
- esac
- else :
- # compilation failed
- $3
- fi
-fi
-rm -fr conftest*
-])# _LT_TRY_DLOPEN_SELF
-
-
-# LT_SYS_DLOPEN_SELF
-# ------------------
-AC_DEFUN([LT_SYS_DLOPEN_SELF],
-[m4_require([_LT_HEADER_DLFCN])dnl
-if test "x$enable_dlopen" != xyes; then
- enable_dlopen=unknown
- enable_dlopen_self=unknown
- enable_dlopen_self_static=unknown
-else
- lt_cv_dlopen=no
- lt_cv_dlopen_libs=
-
- case $host_os in
- beos*)
- lt_cv_dlopen="load_add_on"
- lt_cv_dlopen_libs=
- lt_cv_dlopen_self=yes
- ;;
-
- mingw* | pw32* | cegcc*)
- lt_cv_dlopen="LoadLibrary"
- lt_cv_dlopen_libs=
- ;;
-
- cygwin*)
- lt_cv_dlopen="dlopen"
- lt_cv_dlopen_libs=
- ;;
-
- darwin*)
- # if libdl is installed we need to link against it
- AC_CHECK_LIB([dl], [dlopen],
- [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[
- lt_cv_dlopen="dyld"
- lt_cv_dlopen_libs=
- lt_cv_dlopen_self=yes
- ])
- ;;
-
- *)
- AC_CHECK_FUNC([shl_load],
- [lt_cv_dlopen="shl_load"],
- [AC_CHECK_LIB([dld], [shl_load],
- [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"],
- [AC_CHECK_FUNC([dlopen],
- [lt_cv_dlopen="dlopen"],
- [AC_CHECK_LIB([dl], [dlopen],
- [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],
- [AC_CHECK_LIB([svld], [dlopen],
- [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"],
- [AC_CHECK_LIB([dld], [dld_link],
- [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"])
- ])
- ])
- ])
- ])
- ])
- ;;
- esac
-
- if test "x$lt_cv_dlopen" != xno; then
- enable_dlopen=yes
- else
- enable_dlopen=no
- fi
-
- case $lt_cv_dlopen in
- dlopen)
- save_CPPFLAGS="$CPPFLAGS"
- test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
-
- save_LDFLAGS="$LDFLAGS"
- wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
-
- save_LIBS="$LIBS"
- LIBS="$lt_cv_dlopen_libs $LIBS"
-
- AC_CACHE_CHECK([whether a program can dlopen itself],
- lt_cv_dlopen_self, [dnl
- _LT_TRY_DLOPEN_SELF(
- lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
- lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
- ])
-
- if test "x$lt_cv_dlopen_self" = xyes; then
- wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
- AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
- lt_cv_dlopen_self_static, [dnl
- _LT_TRY_DLOPEN_SELF(
- lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
- lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross)
- ])
- fi
-
- CPPFLAGS="$save_CPPFLAGS"
- LDFLAGS="$save_LDFLAGS"
- LIBS="$save_LIBS"
- ;;
- esac
-
- case $lt_cv_dlopen_self in
- yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
- *) enable_dlopen_self=unknown ;;
- esac
-
- case $lt_cv_dlopen_self_static in
- yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
- *) enable_dlopen_self_static=unknown ;;
- esac
-fi
-_LT_DECL([dlopen_support], [enable_dlopen], [0],
- [Whether dlopen is supported])
-_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
- [Whether dlopen of programs is supported])
-_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
- [Whether dlopen of statically linked programs is supported])
-])# LT_SYS_DLOPEN_SELF
-
-# Old name:
-AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
-
-
-# _LT_COMPILER_C_O([TAGNAME])
-# ---------------------------
-# Check to see if options -c and -o are simultaneously supported by compiler.
-# This macro does not hard code the compiler like AC_PROG_CC_C_O.
-m4_defun([_LT_COMPILER_C_O],
-[m4_require([_LT_DECL_SED])dnl
-m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_TAG_COMPILER])dnl
-AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
- [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
- [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
- $RM -r conftest 2>/dev/null
- mkdir conftest
- cd conftest
- mkdir out
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- lt_compiler_flag="-o out/conftest2.$ac_objext"
- # Insert the option either (1) after the last *FLAGS variable, or
- # (2) before a word containing "conftest.", or (3) at the end.
- # Note that $ac_compile itself does not contain backslashes and begins
- # with a dollar sign (not a hyphen), so the echo should work correctly.
- lt_compile=`echo "$ac_compile" | $SED \
- -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
- -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
- -e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
- (eval "$lt_compile" 2>out/conftest.err)
- ac_status=$?
- cat out/conftest.err >&AS_MESSAGE_LOG_FD
- echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
- if (exit $ac_status) && test -s out/conftest2.$ac_objext
- then
- # The compiler can only warn and ignore the option if not recognized
- # So say no if there are warnings
- $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp
- $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
- if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
- _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
- fi
- fi
- chmod u+w . 2>&AS_MESSAGE_LOG_FD
- $RM conftest*
- # SGI C++ compiler will create directory out/ii_files/ for
- # template instantiation
- test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
- $RM out/* && rmdir out
- cd ..
- $RM -r conftest
- $RM conftest*
-])
-_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
- [Does compiler simultaneously support -c and -o options?])
-])# _LT_COMPILER_C_O
-
-
-# _LT_COMPILER_FILE_LOCKS([TAGNAME])
-# ----------------------------------
-# Check to see if we can do hard links to lock some files if needed
-m4_defun([_LT_COMPILER_FILE_LOCKS],
-[m4_require([_LT_ENABLE_LOCK])dnl
-m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-_LT_COMPILER_C_O([$1])
-
-hard_links="nottested"
-if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then
- # do not overwrite the value of need_locks provided by the user
- AC_MSG_CHECKING([if we can lock with hard links])
- hard_links=yes
- $RM conftest*
- ln conftest.a conftest.b 2>/dev/null && hard_links=no
- touch conftest.a
- ln conftest.a conftest.b 2>&5 || hard_links=no
- ln conftest.a conftest.b 2>/dev/null && hard_links=no
- AC_MSG_RESULT([$hard_links])
- if test "$hard_links" = no; then
- AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe])
- need_locks=warn
- fi
-else
- need_locks=no
-fi
-_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
-])# _LT_COMPILER_FILE_LOCKS
-
-
-# _LT_CHECK_OBJDIR
-# ----------------
-m4_defun([_LT_CHECK_OBJDIR],
-[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
-[rm -f .libs 2>/dev/null
-mkdir .libs 2>/dev/null
-if test -d .libs; then
- lt_cv_objdir=.libs
-else
- # MS-DOS does not allow filenames that begin with a dot.
- lt_cv_objdir=_libs
-fi
-rmdir .libs 2>/dev/null])
-objdir=$lt_cv_objdir
-_LT_DECL([], [objdir], [0],
- [The name of the directory that contains temporary libtool files])dnl
-m4_pattern_allow([LT_OBJDIR])dnl
-AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/",
- [Define to the sub-directory in which libtool stores uninstalled libraries.])
-])# _LT_CHECK_OBJDIR
-
-
-# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
-# --------------------------------------
-# Check hardcoding attributes.
-m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
-[AC_MSG_CHECKING([how to hardcode library paths into programs])
-_LT_TAGVAR(hardcode_action, $1)=
-if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
- test -n "$_LT_TAGVAR(runpath_var, $1)" ||
- test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then
-
- # We can hardcode non-existent directories.
- if test "$_LT_TAGVAR(hardcode_direct, $1)" != no &&
- # If the only mechanism to avoid hardcoding is shlibpath_var, we
- # have to relink, otherwise we might link with an installed library
- # when we should be linking with a yet-to-be-installed one
- ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no &&
- test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then
- # Linking always hardcodes the temporary library directory.
- _LT_TAGVAR(hardcode_action, $1)=relink
- else
- # We can link without hardcoding, and we can hardcode nonexisting dirs.
- _LT_TAGVAR(hardcode_action, $1)=immediate
- fi
-else
- # We cannot hardcode anything, or else we can only hardcode existing
- # directories.
- _LT_TAGVAR(hardcode_action, $1)=unsupported
-fi
-AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
-
-if test "$_LT_TAGVAR(hardcode_action, $1)" = relink ||
- test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then
- # Fast installation is not supported
- enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
- test "$enable_shared" = no; then
- # Fast installation is not necessary
- enable_fast_install=needless
-fi
-_LT_TAGDECL([], [hardcode_action], [0],
- [How to hardcode a shared library path into an executable])
-])# _LT_LINKER_HARDCODE_LIBPATH
-
-
-# _LT_CMD_STRIPLIB
-# ----------------
-m4_defun([_LT_CMD_STRIPLIB],
-[m4_require([_LT_DECL_EGREP])
-striplib=
-old_striplib=
-AC_MSG_CHECKING([whether stripping libraries is possible])
-if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
- test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
- test -z "$striplib" && striplib="$STRIP --strip-unneeded"
- AC_MSG_RESULT([yes])
-else
-# FIXME - insert some real tests, host_os isn't really good enough
- case $host_os in
- darwin*)
- if test -n "$STRIP" ; then
- striplib="$STRIP -x"
- old_striplib="$STRIP -S"
- AC_MSG_RESULT([yes])
- else
- AC_MSG_RESULT([no])
- fi
- ;;
- *)
- AC_MSG_RESULT([no])
- ;;
- esac
-fi
-_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
-_LT_DECL([], [striplib], [1])
-])# _LT_CMD_STRIPLIB
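
The macro above only trusts --strip-debug and --strip-unneeded when $STRIP identifies itself as GNU strip, and otherwise special-cases Darwin with -S and -x. A standalone sketch of that decision in plain shell, assuming a strip on PATH; leaving both variables empty means stripping is not known to be safe:

# Pick strip flags that are safe for libraries, mirroring the check above.
STRIP=${STRIP-strip}
if "$STRIP" -V 2>&1 | grep "GNU strip" >/dev/null; then
  old_striplib="$STRIP --strip-debug"        # for static archives
  striplib="$STRIP --strip-unneeded"         # for shared libraries
elif test "`uname -s`" = Darwin; then
  old_striplib="$STRIP -S"
  striplib="$STRIP -x"
else
  old_striplib=
  striplib=
fi
echo "striplib='$striplib' old_striplib='$old_striplib'"
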
-
-
-# _LT_SYS_DYNAMIC_LINKER([TAG])
-# -----------------------------
-# PORTME Fill in your ld.so characteristics
-m4_defun([_LT_SYS_DYNAMIC_LINKER],
-[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-m4_require([_LT_DECL_EGREP])dnl
-m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_DECL_OBJDUMP])dnl
-m4_require([_LT_DECL_SED])dnl
-AC_MSG_CHECKING([dynamic linker characteristics])
-m4_if([$1],
- [], [
-if test "$GCC" = yes; then
- case $host_os in
- darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
- *) lt_awk_arg="/^libraries:/" ;;
- esac
- lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"`
- if $ECHO "$lt_search_path_spec" | $GREP ';' >/dev/null ; then
- # if the path contains ";" then we assume it to be the separator
- # otherwise default to the standard path separator (i.e. ":") - it is
-    # assumed that no part of a normal pathname contains ";", but that should
-    # be okay in the real world, where ";" in dirpaths is itself problematic.
- lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e 's/;/ /g'`
- else
- lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
- fi
- # Ok, now we have the path, separated by spaces, we can step through it
- # and add multilib dir if necessary.
- lt_tmp_lt_search_path_spec=
- lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
- for lt_sys_path in $lt_search_path_spec; do
- if test -d "$lt_sys_path/$lt_multi_os_dir"; then
- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
- else
- test -d "$lt_sys_path" && \
- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
- fi
- done
- lt_search_path_spec=`$ECHO $lt_tmp_lt_search_path_spec | awk '
-BEGIN {RS=" "; FS="/|\n";} {
- lt_foo="";
- lt_count=0;
- for (lt_i = NF; lt_i > 0; lt_i--) {
- if ($lt_i != "" && $lt_i != ".") {
- if ($lt_i == "..") {
- lt_count++;
- } else {
- if (lt_count == 0) {
- lt_foo="/" $lt_i lt_foo;
- } else {
- lt_count--;
- }
- }
- }
- }
- if (lt_foo != "") { lt_freq[[lt_foo]]++; }
- if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
-}'`
- sys_lib_search_path_spec=`$ECHO $lt_search_path_spec`
-else
- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
-fi])
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
- shlibpath_var=LIBPATH
-
- # AIX 3 has no versioning support, so we append a major version to the name.
- soname_spec='${libname}${release}${shared_ext}$major'
- ;;
-
-aix[[4-9]]*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- hardcode_into_libs=yes
- if test "$host_cpu" = ia64; then
- # AIX 5 supports IA64
- library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- else
- # With GCC up to 2.95.x, collect2 would create an import file
-      # for dependent libraries. The import file would start with
- # the line `#! .'. This would cause the generated library to
- # depend on `.', always an invalid library. This was fixed in
- # development snapshots of GCC prior to 3.0.
- case $host_os in
- aix4 | aix4.[[01]] | aix4.[[01]].*)
- if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
- echo ' yes '
- echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
- :
- else
- can_build_shared=no
- fi
- ;;
- esac
-    # AIX (on Power*) has no versioning support, so currently we cannot hardcode
-    # the correct soname into the executable. We could probably add versioning
-    # support to collect2, so additional links may be useful in the future.
- if test "$aix_use_runtimelinking" = yes; then
- # If using run time linking (on AIX 4.2 or later) use lib<name>.so
- # instead of lib<name>.a to let people know that these are not
- # typical AIX shared libraries.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- else
- # We preserve .a as extension for shared libraries through AIX4.2
- # and later when we are not doing run time linking.
- library_names_spec='${libname}${release}.a $libname.a'
- soname_spec='${libname}${release}${shared_ext}$major'
- fi
- shlibpath_var=LIBPATH
- fi
- ;;
-
-amigaos*)
- case $host_cpu in
- powerpc)
- # Since July 2007 AmigaOS4 officially supports .so libraries.
- # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- ;;
- m68k)
- library_names_spec='$libname.ixlibrary $libname.a'
- # Create ${libname}_ixlibrary.a entries in /sys/libs.
- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
- ;;
- esac
- ;;
-
-beos*)
- library_names_spec='${libname}${shared_ext}'
- dynamic_linker="$host_os ld.so"
- shlibpath_var=LIBRARY_PATH
- ;;
-
-bsdi[[45]]*)
- version_type=linux
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
- sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
- # the default ld.so.conf also contains /usr/contrib/lib and
- # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
- # libtool to hard-code these into programs
- ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
- version_type=windows
- shrext_cmds=".dll"
- need_version=no
- need_lib_prefix=no
-
- case $GCC,$host_os in
- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
- dldir=$destdir/`dirname \$dlpath`~
- test -d \$dldir || mkdir -p \$dldir~
- $install_prog $dir/$dlname \$dldir/$dlname~
- chmod a+x \$dldir/$dlname~
- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
- fi'
- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
- dlpath=$dir/\$dldll~
- $RM \$dlpath'
- shlibpath_overrides_runpath=yes
-
- case $host_os in
- cygwin*)
- # Cygwin DLLs use 'cyg' prefix rather than 'lib'
- soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
- sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib"
- ;;
- mingw* | cegcc*)
- # MinGW DLLs use traditional 'lib' prefix
- soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
- sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"`
- if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
- # It is most probably a Windows format PATH printed by
- # mingw gcc, but we are running on Cygwin. Gcc prints its search
- # path with ; separators, and with drive letters. We can handle the
- # drive letters (cygwin fileutils understands them), so leave them,
- # especially as we might pass files found there to a mingw objdump,
- # which wouldn't understand a cygwinified path. Ahh.
- sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
- else
- sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
- fi
- ;;
- pw32*)
- # pw32 DLLs use 'pw' prefix rather than 'lib'
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
- ;;
-
- *)
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
- ;;
- esac
- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-
-darwin* | rhapsody*)
- dynamic_linker="$host_os dyld"
- version_type=darwin
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
- soname_spec='${libname}${release}${major}$shared_ext'
- shlibpath_overrides_runpath=yes
- shlibpath_var=DYLD_LIBRARY_PATH
- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-m4_if([$1], [],[
- sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
- sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
- ;;
-
-dgux*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- ;;
-
-freebsd1*)
- dynamic_linker=no
- ;;
-
-freebsd* | dragonfly*)
- # DragonFly does not have aout. When/if they implement a new
- # versioning mechanism, adjust this.
- if test -x /usr/bin/objformat; then
- objformat=`/usr/bin/objformat`
- else
- case $host_os in
- freebsd[[123]]*) objformat=aout ;;
- *) objformat=elf ;;
- esac
- fi
- version_type=freebsd-$objformat
- case $version_type in
- freebsd-elf*)
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
- need_version=no
- need_lib_prefix=no
- ;;
- freebsd-*)
- library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
- need_version=yes
- ;;
- esac
- shlibpath_var=LD_LIBRARY_PATH
- case $host_os in
- freebsd2*)
- shlibpath_overrides_runpath=yes
- ;;
- freebsd3.[[01]]* | freebsdelf3.[[01]]*)
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
- freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
- freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
- *) # from 4.6 on, and DragonFly
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
- esac
- ;;
-
-gnu*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- hardcode_into_libs=yes
- ;;
-
-hpux9* | hpux10* | hpux11*)
- # Give a soname corresponding to the major version so that dld.sl refuses to
- # link against other versions.
- version_type=sunos
- need_lib_prefix=no
- need_version=no
- case $host_cpu in
- ia64*)
- shrext_cmds='.so'
- hardcode_into_libs=yes
- dynamic_linker="$host_os dld.so"
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- if test "X$HPUX_IA64_MODE" = X32; then
- sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
- else
- sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
- fi
- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
- ;;
- hppa*64*)
- shrext_cmds='.sl'
- hardcode_into_libs=yes
- dynamic_linker="$host_os dld.sl"
- shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
- shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
- ;;
- *)
- shrext_cmds='.sl'
- dynamic_linker="$host_os dld.sl"
- shlibpath_var=SHLIB_PATH
- shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- ;;
- esac
- # HP-UX runs *really* slowly unless shared libraries are mode 555.
- postinstall_cmds='chmod 555 $lib'
- ;;
-
-interix[[3-9]]*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
-
-irix5* | irix6* | nonstopux*)
- case $host_os in
- nonstopux*) version_type=nonstopux ;;
- *)
- if test "$lt_cv_prog_gnu_ld" = yes; then
- version_type=linux
- else
- version_type=irix
- fi ;;
- esac
- need_lib_prefix=no
- need_version=no
- soname_spec='${libname}${release}${shared_ext}$major'
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
- case $host_os in
- irix5* | nonstopux*)
- libsuff= shlibsuff=
- ;;
- *)
- case $LD in # libtool.m4 will add one of these switches to LD
- *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
- libsuff= shlibsuff= libmagic=32-bit;;
- *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
- libsuff=32 shlibsuff=N32 libmagic=N32;;
- *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
- libsuff=64 shlibsuff=64 libmagic=64-bit;;
- *) libsuff= shlibsuff= libmagic=never-match;;
- esac
- ;;
- esac
- shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
- shlibpath_overrides_runpath=no
- sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
- sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
- hardcode_into_libs=yes
- ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
- dynamic_linker=no
- ;;
-
-# This must be Linux ELF.
-linux* | k*bsd*-gnu)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- # Some binutils ld are patched to set DT_RUNPATH
- save_LDFLAGS=$LDFLAGS
- save_libdir=$libdir
- eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
- LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
- AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
- [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
- [shlibpath_overrides_runpath=yes])])
- LDFLAGS=$save_LDFLAGS
- libdir=$save_libdir
-
- # This implies no fast_install, which is unacceptable.
- # Some rework will be needed to allow for fast_install
- # before this can be enabled.
- hardcode_into_libs=yes
-
- # Append ld.so.conf contents to the search path
- if test -f /etc/ld.so.conf; then
- lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '`
- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
- fi
-
- # We used to test for /lib/ld.so.1 and disable shared libraries on
- # powerpc, because MkLinux only supported shared libraries with the
- # GNU dynamic linker. Since this was broken with cross compilers,
- # most powerpc-linux boxes support dynamic linking these days and
- # people can always --disable-shared, the test was removed, and we
- # assume the GNU/Linux dynamic linker is in use.
- dynamic_linker='GNU/Linux ld.so'
- ;;
-
-netbsd*)
- version_type=sunos
- need_lib_prefix=no
- need_version=no
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
- dynamic_linker='NetBSD (a.out) ld.so'
- else
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- dynamic_linker='NetBSD ld.elf_so'
- fi
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- ;;
-
-newsos6)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- ;;
-
-*nto* | *qnx*)
- version_type=qnx
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- dynamic_linker='ldqnx.so'
- ;;
-
-openbsd*)
- version_type=sunos
- sys_lib_dlsearch_path_spec="/usr/lib"
- need_lib_prefix=no
- # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
- case $host_os in
- openbsd3.3 | openbsd3.3.*) need_version=yes ;;
- *) need_version=no ;;
- esac
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- case $host_os in
- openbsd2.[[89]] | openbsd2.[[89]].*)
- shlibpath_overrides_runpath=no
- ;;
- *)
- shlibpath_overrides_runpath=yes
- ;;
- esac
- else
- shlibpath_overrides_runpath=yes
- fi
- ;;
-
-os2*)
- libname_spec='$name'
- shrext_cmds=".dll"
- need_lib_prefix=no
- library_names_spec='$libname${shared_ext} $libname.a'
- dynamic_linker='OS/2 ld.exe'
- shlibpath_var=LIBPATH
- ;;
-
-osf3* | osf4* | osf5*)
- version_type=osf
- need_lib_prefix=no
- need_version=no
- soname_spec='${libname}${release}${shared_ext}$major'
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
- sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
- ;;
-
-rdos*)
- dynamic_linker=no
- ;;
-
-solaris*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- # ldd complains unless libraries are executable
- postinstall_cmds='chmod +x $lib'
- ;;
-
-sunos4*)
- version_type=sunos
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
- finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- if test "$with_gnu_ld" = yes; then
- need_lib_prefix=no
- fi
- need_version=yes
- ;;
-
-sysv4 | sysv4.3*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- case $host_vendor in
- sni)
- shlibpath_overrides_runpath=no
- need_lib_prefix=no
- runpath_var=LD_RUN_PATH
- ;;
- siemens)
- need_lib_prefix=no
- ;;
- motorola)
- need_lib_prefix=no
- need_version=no
- shlibpath_overrides_runpath=no
- sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
- ;;
- esac
- ;;
-
-sysv4*MP*)
- if test -d /usr/nec ;then
- version_type=linux
- library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
- soname_spec='$libname${shared_ext}.$major'
- shlibpath_var=LD_LIBRARY_PATH
- fi
- ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
- version_type=freebsd-elf
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=yes
- hardcode_into_libs=yes
- if test "$with_gnu_ld" = yes; then
- sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
- else
- sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
- case $host_os in
- sco3.2v5*)
- sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
- ;;
- esac
- fi
- sys_lib_dlsearch_path_spec='/usr/lib'
- ;;
-
-tpf*)
- # TPF is a cross-target only. Preferred cross-host = GNU/Linux.
- version_type=linux
- need_lib_prefix=no
- need_version=no
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- shlibpath_var=LD_LIBRARY_PATH
- shlibpath_overrides_runpath=no
- hardcode_into_libs=yes
- ;;
-
-uts4*)
- version_type=linux
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LD_LIBRARY_PATH
- ;;
-
-*)
- dynamic_linker=no
- ;;
-esac
-AC_MSG_RESULT([$dynamic_linker])
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
- variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
- sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
- sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
-
-_LT_DECL([], [variables_saved_for_relink], [1],
- [Variables whose values should be saved in libtool wrapper scripts and
- restored at link time])
-_LT_DECL([], [need_lib_prefix], [0],
- [Do we need the "lib" prefix for modules?])
-_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
-_LT_DECL([], [version_type], [0], [Library versioning type])
-_LT_DECL([], [runpath_var], [0], [Shared library runtime path variable])
-_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
-_LT_DECL([], [shlibpath_overrides_runpath], [0],
- [Is shlibpath searched before the hard-coded library search path?])
-_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
-_LT_DECL([], [library_names_spec], [1],
- [[List of archive names. First name is the real one, the rest are links.
- The last name is the one that the linker finds with -lNAME]])
-_LT_DECL([], [soname_spec], [1],
- [[The coded name of the library, if different from the real name]])
-_LT_DECL([], [postinstall_cmds], [2],
- [Command to use after installation of a shared archive])
-_LT_DECL([], [postuninstall_cmds], [2],
- [Command to use after uninstallation of a shared archive])
-_LT_DECL([], [finish_cmds], [2],
- [Commands used to finish a libtool library installation in a directory])
-_LT_DECL([], [finish_eval], [1],
- [[As "finish_cmds", except a single script fragment to be evaled but
- not shown]])
-_LT_DECL([], [hardcode_into_libs], [0],
- [Whether we should hardcode library paths into libraries])
-_LT_DECL([], [sys_lib_search_path_spec], [2],
- [Compile-time system search path for libraries])
-_LT_DECL([], [sys_lib_dlsearch_path_spec], [2],
- [Run-time system search path for libraries])
-])# _LT_SYS_DYNAMIC_LINKER
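
Most of the macro above is a per-OS table, but the GCC branch at its top derives the compile-time library search path from the compiler itself. A standalone sketch of that extraction, assuming gcc and a host whose path separator is ':':

# Recover the compiler's library search path the way the GCC branch does:
# take the "libraries:" line of -print-search-dirs, drop the prefix and the
# sysroot '=' markers, then split on the path separator.
CC=${CC-gcc}
lt_search_path_spec=`$CC -print-search-dirs | grep '^libraries:' | \
  sed -e 's/^libraries://' -e 's,=/,/,g'`
sys_lib_search_path_spec=`echo "$lt_search_path_spec" | tr ':' ' '`
echo "compile-time library search path: $sys_lib_search_path_spec"

The real macro goes on to append multilib directories and deduplicate the list with awk; this sketch stops at the raw path.
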
-
-
-# _LT_PATH_TOOL_PREFIX(TOOL)
-# --------------------------
-# find a file program which can recognize shared library
-AC_DEFUN([_LT_PATH_TOOL_PREFIX],
-[m4_require([_LT_DECL_EGREP])dnl
-AC_MSG_CHECKING([for $1])
-AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
-[case $MAGIC_CMD in
-[[\\/*] | ?:[\\/]*])
- lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
- ;;
-*)
- lt_save_MAGIC_CMD="$MAGIC_CMD"
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-dnl $ac_dummy forces splitting on constant user-supplied paths.
-dnl POSIX.2 word splitting is done only on the output of word expansions,
-dnl not every word. This closes a longstanding sh security hole.
- ac_dummy="m4_if([$2], , $PATH, [$2])"
- for ac_dir in $ac_dummy; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f $ac_dir/$1; then
- lt_cv_path_MAGIC_CMD="$ac_dir/$1"
- if test -n "$file_magic_test_file"; then
- case $deplibs_check_method in
- "file_magic "*)
- file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
- MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
- if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
- $EGREP "$file_magic_regex" > /dev/null; then
- :
- else
- cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such. This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem. Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
- fi ;;
- esac
- fi
- break
- fi
- done
- IFS="$lt_save_ifs"
- MAGIC_CMD="$lt_save_MAGIC_CMD"
- ;;
-esac])
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
- AC_MSG_RESULT($MAGIC_CMD)
-else
- AC_MSG_RESULT(no)
-fi
-_LT_DECL([], [MAGIC_CMD], [0],
- [Used to examine libraries when file_magic_cmd begins with "file"])dnl
-])# _LT_PATH_TOOL_PREFIX
-
-# Old name:
-AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
-
-
-# _LT_PATH_MAGIC
-# --------------
-# find a file program which can recognize a shared library
-m4_defun([_LT_PATH_MAGIC],
-[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
-if test -z "$lt_cv_path_MAGIC_CMD"; then
- if test -n "$ac_tool_prefix"; then
- _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
- else
- MAGIC_CMD=:
- fi
-fi
-])# _LT_PATH_MAGIC
-
-
-# LT_PATH_LD
-# ----------
-# find the pathname to the GNU or non-GNU linker
-AC_DEFUN([LT_PATH_LD],
-[AC_REQUIRE([AC_PROG_CC])dnl
-AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_REQUIRE([AC_CANONICAL_BUILD])dnl
-m4_require([_LT_DECL_SED])dnl
-m4_require([_LT_DECL_EGREP])dnl
-
-AC_ARG_WITH([gnu-ld],
- [AS_HELP_STRING([--with-gnu-ld],
- [assume the C compiler uses GNU ld @<:@default=no@:>@])],
- [test "$withval" = no || with_gnu_ld=yes],
- [with_gnu_ld=no])dnl
-
-ac_prog=ld
-if test "$GCC" = yes; then
- # Check if gcc -print-prog-name=ld gives a path.
- AC_MSG_CHECKING([for ld used by $CC])
- case $host in
- *-*-mingw*)
- # gcc leaves a trailing carriage return which upsets mingw
- ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
- *)
- ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
- esac
- case $ac_prog in
- # Accept absolute paths.
- [[\\/]]* | ?:[[\\/]]*)
- re_direlt='/[[^/]][[^/]]*/\.\./'
- # Canonicalize the pathname of ld
- ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
- while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
- ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
- done
- test -z "$LD" && LD="$ac_prog"
- ;;
- "")
- # If it fails, then pretend we aren't using GCC.
- ac_prog=ld
- ;;
- *)
- # If it is relative, then search for the first ld in PATH.
- with_gnu_ld=unknown
- ;;
- esac
-elif test "$with_gnu_ld" = yes; then
- AC_MSG_CHECKING([for GNU ld])
-else
- AC_MSG_CHECKING([for non-GNU ld])
-fi
-AC_CACHE_VAL(lt_cv_path_LD,
-[if test -z "$LD"; then
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in $PATH; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
- lt_cv_path_LD="$ac_dir/$ac_prog"
- # Check to see if the program is GNU ld. I'd rather use --version,
- # but apparently some variants of GNU ld only accept -v.
- # Break only if it was the GNU/non-GNU ld that we prefer.
- case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
- *GNU* | *'with BFD'*)
- test "$with_gnu_ld" != no && break
- ;;
- *)
- test "$with_gnu_ld" != yes && break
- ;;
- esac
- fi
- done
- IFS="$lt_save_ifs"
-else
- lt_cv_path_LD="$LD" # Let the user override the test with a path.
-fi])
-LD="$lt_cv_path_LD"
-if test -n "$LD"; then
- AC_MSG_RESULT($LD)
-else
- AC_MSG_RESULT(no)
-fi
-test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
-_LT_PATH_LD_GNU
-AC_SUBST([LD])
-
-_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
-])# LT_PATH_LD
-
-# Old names:
-AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
-AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AM_PROG_LD], [])
-dnl AC_DEFUN([AC_PROG_LD], [])
-
-
-# _LT_PATH_LD_GNU
-# ---------------
-m4_defun([_LT_PATH_LD_GNU],
-[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
-[# I'd rather use --version here, but apparently some GNU lds only accept -v.
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
- lt_cv_prog_gnu_ld=yes
- ;;
-*)
- lt_cv_prog_gnu_ld=no
- ;;
-esac])
-with_gnu_ld=$lt_cv_prog_gnu_ld
-])# _LT_PATH_LD_GNU
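
LT_PATH_LD prefers whatever linker the C compiler reports via -print-prog-name=ld, and _LT_PATH_LD_GNU then classifies that linker by its -v banner. A standalone sketch of the two steps together, assuming gcc and that the reported path is absolute:

# Locate the linker the compiler would use, then classify it: GNU ld and
# gold advertise "GNU" or "with BFD" in their -v output.
CC=${CC-gcc}
LD=`$CC -print-prog-name=ld 2>/dev/null`
case `"$LD" -v 2>&1 </dev/null` in
  *GNU* | *'with BFD'*) with_gnu_ld=yes ;;
  *) with_gnu_ld=no ;;
esac
echo "LD=$LD (GNU ld: $with_gnu_ld)"
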
-
-
-# _LT_CMD_RELOAD
-# --------------
-# find reload flag for linker
-# -- PORTME Some linkers may need a different reload flag.
-m4_defun([_LT_CMD_RELOAD],
-[AC_CACHE_CHECK([for $LD option to reload object files],
- lt_cv_ld_reload_flag,
- [lt_cv_ld_reload_flag='-r'])
-reload_flag=$lt_cv_ld_reload_flag
-case $reload_flag in
-"" | " "*) ;;
-*) reload_flag=" $reload_flag" ;;
-esac
-reload_cmds='$LD$reload_flag -o $output$reload_objs'
-case $host_os in
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
- else
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- fi
- ;;
-esac
-_LT_DECL([], [reload_flag], [1], [How to create reloadable object files])dnl
-_LT_DECL([], [reload_cmds], [2])dnl
-])# _LT_CMD_RELOAD
-
-
-# _LT_CHECK_MAGIC_METHOD
-# ----------------------
-# how to check for library dependencies
-# -- PORTME fill in with the dynamic library characteristics
-m4_defun([_LT_CHECK_MAGIC_METHOD],
-[m4_require([_LT_DECL_EGREP])
-m4_require([_LT_DECL_OBJDUMP])
-AC_CACHE_CHECK([how to recognize dependent libraries],
-lt_cv_deplibs_check_method,
-[lt_cv_file_magic_cmd='$MAGIC_CMD'
-lt_cv_file_magic_test_file=
-lt_cv_deplibs_check_method='unknown'
-# Need to set the preceding variable on all platforms that support
-# interlibrary dependencies.
-# 'none' -- dependencies not supported.
-# `unknown' -- same as none, but documents that we really don't know.
-# 'pass_all' -- all dependencies passed with no checks.
-# 'test_compile' -- check by making test program.
-# 'file_magic [[regex]]' -- check by looking for files in library path
-# which responds to the $file_magic_cmd with a given extended regex.
-# If you have `file' or equivalent on your system and you're not sure
-# whether `pass_all' will *always* work, you probably want this one.
-
-case $host_os in
-aix[[4-9]]*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-beos*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-bsdi[[45]]*)
- lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
- lt_cv_file_magic_cmd='/usr/bin/file -L'
- lt_cv_file_magic_test_file=/shlib/libc.so
- ;;
-
-cygwin*)
- # func_win32_libid is a shell function defined in ltmain.sh
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- ;;
-
-mingw* | pw32*)
- # Base MSYS/MinGW do not provide the 'file' command needed by
- # func_win32_libid shell function, so use a weaker test based on 'objdump',
- # unless we find 'file', for example because we are cross-compiling.
- if ( file / ) >/dev/null 2>&1; then
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-
-cegcc)
- # use the weaker test based on 'objdump'. See mingw*.
- lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- ;;
-
-darwin* | rhapsody*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-freebsd* | dragonfly*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
- case $host_cpu in
- i*86 )
- # Not sure whether the presence of OpenBSD here was a mistake.
- # Let's accept both of them until this is cleared up.
- lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
- lt_cv_file_magic_cmd=/usr/bin/file
- lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
- ;;
- esac
- else
- lt_cv_deplibs_check_method=pass_all
- fi
- ;;
-
-gnu*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-hpux10.20* | hpux11*)
- lt_cv_file_magic_cmd=/usr/bin/file
- case $host_cpu in
- ia64*)
- lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
- lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
- ;;
- hppa*64*)
- [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]']
- lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
- ;;
- *)
- lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]].[[0-9]]) shared library'
- lt_cv_file_magic_test_file=/usr/lib/libc.sl
- ;;
- esac
- ;;
-
-interix[[3-9]]*)
- # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
- lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
- ;;
-
-irix5* | irix6* | nonstopux*)
- case $LD in
- *-32|*"-32 ") libmagic=32-bit;;
- *-n32|*"-n32 ") libmagic=N32;;
- *-64|*"-64 ") libmagic=64-bit;;
- *) libmagic=never-match;;
- esac
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-# This must be Linux ELF.
-linux* | k*bsd*-gnu)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
- lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
- else
- lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
- fi
- ;;
-
-newos6*)
- lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
- lt_cv_file_magic_cmd=/usr/bin/file
- lt_cv_file_magic_test_file=/usr/lib/libnls.so
- ;;
-
-*nto* | *qnx*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-openbsd*)
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
- else
- lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
- fi
- ;;
-
-osf3* | osf4* | osf5*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-rdos*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-solaris*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-sysv4 | sysv4.3*)
- case $host_vendor in
- motorola)
- lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
- lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
- ;;
- ncr)
- lt_cv_deplibs_check_method=pass_all
- ;;
- sequent)
- lt_cv_file_magic_cmd='/bin/file'
- lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
- ;;
- sni)
- lt_cv_file_magic_cmd='/bin/file'
- lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
- lt_cv_file_magic_test_file=/lib/libc.so
- ;;
- siemens)
- lt_cv_deplibs_check_method=pass_all
- ;;
- pc)
- lt_cv_deplibs_check_method=pass_all
- ;;
- esac
- ;;
-
-tpf*)
- lt_cv_deplibs_check_method=pass_all
- ;;
-esac
-])
-file_magic_cmd=$lt_cv_file_magic_cmd
-deplibs_check_method=$lt_cv_deplibs_check_method
-test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-_LT_DECL([], [deplibs_check_method], [1],
- [Method to check whether dependent libraries are shared objects])
-_LT_DECL([], [file_magic_cmd], [1],
- [Command to use when deplibs_check_method == "file_magic"])
-])# _LT_CHECK_MAGIC_METHOD
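
Where pass_all is not safe, the deplibs check above boils down to running a file-magic command over a candidate library and matching its output against a per-platform regular expression. A standalone sketch of one such check; the regex is the ELF pattern used for several hosts above, and the candidate path is purely illustrative:

# Accept a dependency only if its file magic looks like a shared object.
EGREP=${EGREP-'grep -E'}
file_magic_cmd='file -L'
file_magic_regex='ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
candidate=/lib/libc.so              # illustrative; substitute a real library
if $file_magic_cmd "$candidate" 2>/dev/null | \
   $EGREP "$file_magic_regex" >/dev/null; then
  echo "$candidate: recognized as a shared library dependency"
else
  echo "$candidate: not recognized"
fi
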
-
-
-# LT_PATH_NM
-# ----------
-# find the pathname to a BSD- or MS-compatible name lister
-AC_DEFUN([LT_PATH_NM],
-[AC_REQUIRE([AC_PROG_CC])dnl
-AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
-[if test -n "$NM"; then
- # Let the user override the test.
- lt_cv_path_NM="$NM"
-else
- lt_nm_to_check="${ac_tool_prefix}nm"
- if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
- lt_nm_to_check="$lt_nm_to_check nm"
- fi
- for lt_tmp_nm in $lt_nm_to_check; do
- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
- IFS="$lt_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- tmp_nm="$ac_dir/$lt_tmp_nm"
- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
- # Check to see if the nm accepts a BSD-compat flag.
- # Adding the `sed 1q' prevents false positives on HP-UX, which says:
- # nm: unknown option "B" ignored
- # Tru64's nm complains that /dev/null is an invalid object file
- case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
- */dev/null* | *'Invalid file or object type'*)
- lt_cv_path_NM="$tmp_nm -B"
- break
- ;;
- *)
- case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
- */dev/null*)
- lt_cv_path_NM="$tmp_nm -p"
- break
- ;;
- *)
- lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
- continue # so that we can try to find one that supports BSD flags
- ;;
- esac
- ;;
- esac
- fi
- done
- IFS="$lt_save_ifs"
- done
- : ${lt_cv_path_NM=no}
-fi])
-if test "$lt_cv_path_NM" != "no"; then
- NM="$lt_cv_path_NM"
-else
- # Didn't find any BSD compatible name lister, look for dumpbin.
- AC_CHECK_TOOLS(DUMPBIN, ["dumpbin -symbols" "link -dump -symbols"], :)
- AC_SUBST([DUMPBIN])
- if test "$DUMPBIN" != ":"; then
- NM="$DUMPBIN"
- fi
-fi
-test -z "$NM" && NM=nm
-AC_SUBST([NM])
-_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
-
-AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
- [lt_cv_nm_interface="BSD nm"
- echo "int some_variable = 0;" > conftest.$ac_ext
- (eval echo "\"\$as_me:__oline__: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
- (eval "$ac_compile" 2>conftest.err)
- cat conftest.err >&AS_MESSAGE_LOG_FD
- (eval echo "\"\$as_me:__oline__: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
- (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
- cat conftest.err >&AS_MESSAGE_LOG_FD
- (eval echo "\"\$as_me:__oline__: output\"" >&AS_MESSAGE_LOG_FD)
- cat conftest.out >&AS_MESSAGE_LOG_FD
- if $GREP 'External.*some_variable' conftest.out > /dev/null; then
- lt_cv_nm_interface="MS dumpbin"
- fi
- rm -f conftest*])
-])# LT_PATH_NM
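
The name-lister search above hinges on one detail: the exit status of "nm -B" cannot be trusted, because HP-UX nm only warns about the unknown flag and Tru64 nm objects to /dev/null, so the macro inspects the first line of output instead. A standalone sketch of that probe, assuming an nm on PATH:

# Decide whether this nm takes the BSD -B flag or the POSIX -p flag by
# looking at what it says about /dev/null rather than at its exit status.
NM=${NM-nm}
case `"$NM" -B /dev/null 2>&1 | sed '1q'` in
  */dev/null* | *'Invalid file or object type'*)
    NM="$NM -B" ;;
  *)
    case `"$NM" -p /dev/null 2>&1 | sed '1q'` in
      */dev/null*) NM="$NM -p" ;;
    esac ;;
esac
echo "name lister: $NM"
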
-
-# Old names:
-AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
-AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AM_PROG_NM], [])
-dnl AC_DEFUN([AC_PROG_NM], [])
-
-
-# LT_LIB_M
-# --------
-# check for math library
-AC_DEFUN([LT_LIB_M],
-[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-LIBM=
-case $host in
-*-*-beos* | *-*-cygwin* | *-*-pw32* | *-*-darwin*)
-  # These systems don't have libm, or don't need it
- ;;
-*-ncr-sysv4.3*)
- AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw")
- AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
- ;;
-*)
- AC_CHECK_LIB(m, cos, LIBM="-lm")
- ;;
-esac
-AC_SUBST([LIBM])
-])# LT_LIB_M
-
-# Old name:
-AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_CHECK_LIBM], [])
-
-
-# _LT_COMPILER_NO_RTTI([TAGNAME])
-# -------------------------------
-m4_defun([_LT_COMPILER_NO_RTTI],
-[m4_require([_LT_TAG_COMPILER])dnl
-
-_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
-
-if test "$GCC" = yes; then
- _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
-
- _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
- lt_cv_prog_compiler_rtti_exceptions,
- [-fno-rtti -fno-exceptions], [],
- [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
-fi
-_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
- [Compiler flag to turn off builtin functions])
-])# _LT_COMPILER_NO_RTTI
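
The -fno-rtti -fno-exceptions probe above relies on _LT_COMPILER_OPTION, whose trick is that a flag only counts as supported if the test compile leaves no diagnostics behind, since some compilers merely warn about unknown options and still exit 0. A standalone sketch of that style of probe, assuming g++ (the flags are C++-only, so a C++ test file is used); the real macro compares stderr against cached boilerplate, and requiring it to be empty is a simplification:

# Accept a compiler flag only if the test compile succeeds and its stderr
# stays empty; a "warning: unrecognized option" would otherwise slip through.
flag='-fno-rtti -fno-exceptions'
echo 'int main() { return 0; }' > conftest.cpp
if g++ $flag -c conftest.cpp -o conftest.o 2>conftest.err &&
   test ! -s conftest.err; then
  echo "compiler accepts: $flag"
else
  echo "compiler rejects: $flag"
fi
rm -f conftest.cpp conftest.o conftest.err
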
-
-
-# _LT_CMD_GLOBAL_SYMBOLS
-# ----------------------
-m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
-[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-AC_REQUIRE([AC_PROG_CC])dnl
-AC_REQUIRE([LT_PATH_NM])dnl
-AC_REQUIRE([LT_PATH_LD])dnl
-m4_require([_LT_DECL_SED])dnl
-m4_require([_LT_DECL_EGREP])dnl
-m4_require([_LT_TAG_COMPILER])dnl
-
-# Check for command to grab the raw symbol name followed by C symbol from nm.
-AC_MSG_CHECKING([command to parse $NM output from $compiler object])
-AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
-[
-# These are sane defaults that work on at least a few old systems.
-# [They come from Ultrix. What could be older than Ultrix?!! ;)]
-
-# Character class describing NM global symbol codes.
-symcode='[[BCDEGRST]]'
-
-# Regexp to match symbols that can be accessed directly from C.
-sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
-
-# Define system-specific variables.
-case $host_os in
-aix*)
- symcode='[[BCDT]]'
- ;;
-cygwin* | mingw* | pw32* | cegcc*)
- symcode='[[ABCDGISTW]]'
- ;;
-hpux*)
- if test "$host_cpu" = ia64; then
- symcode='[[ABCDEGRST]]'
- fi
- ;;
-irix* | nonstopux*)
- symcode='[[BCDEGRST]]'
- ;;
-osf*)
- symcode='[[BCDEGQRST]]'
- ;;
-solaris*)
- symcode='[[BDRT]]'
- ;;
-sco3.2v5*)
- symcode='[[DT]]'
- ;;
-sysv4.2uw2*)
- symcode='[[DT]]'
- ;;
-sysv5* | sco5v6* | unixware* | OpenUNIX*)
- symcode='[[ABDT]]'
- ;;
-sysv4)
- symcode='[[DFNSTU]]'
- ;;
-esac
-
-# If we're using GNU nm, then use its standard symbol codes.
-case `$NM -V 2>&1` in
-*GNU* | *'with BFD'*)
- symcode='[[ABCDGIRSTW]]' ;;
-esac
-
-# Transform an extracted symbol line into a proper C declaration.
-# Some systems (esp. on ia64) link data and code symbols differently,
-# so use this general approach.
-lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
-# Transform an extracted symbol line into symbol name and symbol address
-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'"
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
-# Handle CRLF in mingw tool chain
-opt_cr=
-case $build_os in
-mingw*)
- opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
- ;;
-esac
-
-# Try without a prefix underscore, then with it.
-for ac_symprfx in "" "_"; do
-
- # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
- symxfrm="\\1 $ac_symprfx\\2 \\2"
-
- # Write the raw and C identifiers.
- if test "$lt_cv_nm_interface" = "MS dumpbin"; then
- # Fake it for dumpbin and say T for any non-static function
- # and D for any global variable.
- # Also find C++ and __fastcall symbols from MSVC++,
- # which start with @ or ?.
- lt_cv_sys_global_symbol_pipe="$AWK ['"\
-" {last_section=section; section=\$ 3};"\
-" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
-" \$ 0!~/External *\|/{next};"\
-" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
-" {if(hide[section]) next};"\
-" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
-" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
-" s[1]~/^[@?]/{print s[1], s[1]; next};"\
-" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
-" ' prfx=^$ac_symprfx]"
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-
- rm -f conftest*
- cat > conftest.$ac_ext <<_LT_EOF
-#ifdef __cplusplus
-extern "C" {
-#endif
-char nm_test_var;
-void nm_test_func(void);
-void nm_test_func(void){}
-#ifdef __cplusplus
-}
-#endif
-int main(){nm_test_var='a';nm_test_func();return(0);}
-_LT_EOF
-
- if AC_TRY_EVAL(ac_compile); then
- # Now try to grab the symbols.
- nlist=conftest.nm
- if AC_TRY_EVAL(NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) && test -s "$nlist"; then
- # Try sorting and uniquifying the output.
- if sort "$nlist" | uniq > "$nlist"T; then
- mv -f "$nlist"T "$nlist"
- else
- rm -f "$nlist"T
- fi
-
- # Make sure that we snagged all the symbols we need.
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-_LT_EOF
- # Now generate the symbol file.
- eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
-
- cat <<_LT_EOF >> conftest.$ac_ext
-
-/* The mapping between symbol names and symbols. */
-const struct {
- const char *name;
- void *address;
-}
-lt__PROGRAM__LTX_preloaded_symbols[[]] =
-{
- { "@PROGRAM@", (void *) 0 },
-_LT_EOF
- $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
- cat <<\_LT_EOF >> conftest.$ac_ext
- {0, (void *) 0}
-};
-
-/* This works around a problem in FreeBSD linker */
-#ifdef FREEBSD_WORKAROUND
-static const void *lt_preloaded_setup() {
- return lt__PROGRAM__LTX_preloaded_symbols;
-}
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-_LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
- lt_save_LIBS="$LIBS"
- lt_save_CFLAGS="$CFLAGS"
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
- if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
- LIBS="$lt_save_LIBS"
- CFLAGS="$lt_save_CFLAGS"
- else
- echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
- fi
- else
- echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
- fi
- else
- echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
- fi
- else
- echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
- cat conftest.$ac_ext >&5
- fi
- rm -rf conftest* conftst*
-
- # Do not use the global_symbol_pipe unless it works.
- if test "$pipe_works" = yes; then
- break
- else
- lt_cv_sys_global_symbol_pipe=
- fi
-done
-])
-if test -z "$lt_cv_sys_global_symbol_pipe"; then
- lt_cv_sys_global_symbol_to_cdecl=
-fi
-if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
- AC_MSG_RESULT(failed)
-else
- AC_MSG_RESULT(ok)
-fi
-
-_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
- [Take the output of nm and produce a listing of raw symbols and C names])
-_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
- [Transform the output of nm in a proper C declaration])
-_LT_DECL([global_symbol_to_c_name_address],
- [lt_cv_sys_global_symbol_to_c_name_address], [1],
- [Transform the output of nm in a C name address pair])
-_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
- [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
- [Transform the output of nm in a C name address pair when lib prefix is needed])
-]) # _LT_CMD_GLOBAL_SYMBOLS
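
The global-symbol pipe assembled above exists to turn nm output into C declarations and a name/address table for preloaded symbols, and it is validated against a two-symbol test program before being trusted. A much-reduced standalone sketch of the BSD-nm branch, assuming gcc and GNU nm on an ELF host; the real pipe also distinguishes code from data symbols and handles underscore prefixes:

# Compile a probe, list its global symbols, and rewrite each matching
# "ADDRESS CODE name" line into a C declaration, the way the verified
# symbol pipe does.
cat > conftest.c <<'EOF'
char nm_test_var;
void nm_test_func(void) {}
int main(void) { nm_test_var = 'a'; nm_test_func(); return 0; }
EOF
gcc -c conftest.c -o conftest.o

symcode='[ABCDGIRSTW]'      # GNU nm global symbol codes, as selected above
nm conftest.o | \
  sed -n -e "s/^.*[ ]$symcode[ ][ ]*\(nm_test_[a-z]*\)\$/extern char \1;/p"
rm -f conftest.c conftest.o

If nothing is printed, the pipe does not work for this toolchain, which is exactly the failure mode the macro checks for before trusting the result.
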
-
-
-# _LT_COMPILER_PIC([TAGNAME])
-# ---------------------------
-m4_defun([_LT_COMPILER_PIC],
-[m4_require([_LT_TAG_COMPILER])dnl
-_LT_TAGVAR(lt_prog_compiler_wl, $1)=
-_LT_TAGVAR(lt_prog_compiler_pic, $1)=
-_LT_TAGVAR(lt_prog_compiler_static, $1)=
-
-AC_MSG_CHECKING([for $compiler option to produce PIC])
-m4_if([$1], [CXX], [
- # C++ specific cases for pic, static, wl, etc.
- if test "$GXX" = yes; then
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
-
- case $host_os in
- aix*)
- # All AIX code is PIC.
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- m68k)
- # FIXME: we need at least 68020 code to build shared libraries, but
- # adding the `-m68020' flag to GCC prevents building anything better,
- # like `-m68040'.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
- ;;
- esac
- ;;
-
- beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
- # PIC is the default for these OSes.
- ;;
- mingw* | cygwin* | os2* | pw32* | cegcc*)
- # This hack is so that the source file can tell whether it is being
- # built for inclusion in a dll (and should export symbols for example).
- # Although the cygwin gcc ignores -fPIC, still need this for old-style
- # (--disable-auto-import) libraries
- m4_if([$1], [GCJ], [],
- [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
- ;;
- darwin* | rhapsody*)
- # PIC is the default on this platform
- # Common symbols not allowed in MH_DYLIB files
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
- ;;
- *djgpp*)
- # DJGPP does not support shared libraries at all
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=
- ;;
- interix[[3-9]]*)
- # Interix 3.x gcc -fpic/-fPIC options generate broken code.
- # Instead, we relocate shared libraries at runtime.
- ;;
- sysv4*MP*)
- if test -d /usr/nec; then
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
- fi
- ;;
- hpux*)
- # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
- # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
- # sets the default TLS model and affects inlining.
- case $host_cpu in
- hppa*64*)
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- esac
- ;;
- *qnx* | *nto*)
-        # QNX uses GNU C++, but we need to define the -shared option too,
-        # otherwise it will core dump.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- esac
- else
- case $host_os in
- aix[[4-9]]*)
- # All AIX code is PIC.
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- else
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
- fi
- ;;
- chorus*)
- case $cc_basename in
- cxch68*)
- # Green Hills C++ Compiler
- # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
- ;;
- esac
- ;;
- dgux*)
- case $cc_basename in
- ec++*)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- ;;
- ghcx*)
- # Green Hills C++ Compiler
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
- ;;
- *)
- ;;
- esac
- ;;
- freebsd* | dragonfly*)
- # FreeBSD uses GNU C++
- ;;
- hpux9* | hpux10* | hpux11*)
- case $cc_basename in
- CC*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
- if test "$host_cpu" != ia64; then
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
- fi
- ;;
- aCC*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
- case $host_cpu in
- hppa*64*|ia64*)
- # +Z the default
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
- ;;
- esac
- ;;
- *)
- ;;
- esac
- ;;
- interix*)
- # This is c89, which is MS Visual C++ (no shared libs)
-      # Anyone want to do a port?
- ;;
- irix5* | irix6* | nonstopux*)
- case $cc_basename in
- CC*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- # CC pic flag -KPIC is the default.
- ;;
- *)
- ;;
- esac
- ;;
- linux* | k*bsd*-gnu)
- case $cc_basename in
- KCC*)
- # KAI C++ Compiler
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- ecpc* )
- # old Intel C++ for x86_64 which still supported -KPIC.
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
- ;;
- icpc* )
- # Intel C++, used to be incompatible with GCC.
- # ICC 10 doesn't accept -KPIC any more.
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
- ;;
- pgCC* | pgcpp*)
- # Portland Group C++ compiler
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
- cxx*)
- # Compaq C++
- # Make sure the PIC flag is empty. It appears that all Alpha
- # Linux and Compaq Tru64 Unix objects are PIC.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- ;;
- xlc* | xlC*)
- # IBM XL 8.0 on PPC
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C++ 5.9
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
- ;;
- esac
- ;;
- esac
- ;;
- lynxos*)
- ;;
- m88k*)
- ;;
- mvs*)
- case $cc_basename in
- cxx*)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
- ;;
- *)
- ;;
- esac
- ;;
- netbsd*)
- ;;
- *qnx* | *nto*)
-      # QNX uses GNU C++, but we need to define the -shared option too,
-      # otherwise it will core dump.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
- ;;
- osf3* | osf4* | osf5*)
- case $cc_basename in
- KCC*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
- ;;
- RCC*)
- # Rational C++ 2.4.1
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
- ;;
- cxx*)
- # Digital/Compaq C++
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- # Make sure the PIC flag is empty. It appears that all Alpha
- # Linux and Compaq Tru64 Unix objects are PIC.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- ;;
- *)
- ;;
- esac
- ;;
- psos*)
- ;;
- solaris*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
- ;;
- gcx*)
- # Green Hills C++ Compiler
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
- ;;
- *)
- ;;
- esac
- ;;
- sunos4*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.x
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
- lcc*)
- # Lucid
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
- ;;
- *)
- ;;
- esac
- ;;
- sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
- case $cc_basename in
- CC*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
- esac
- ;;
- tandem*)
- case $cc_basename in
- NCC*)
- # NonStop-UX NCC 3.20
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- ;;
- *)
- ;;
- esac
- ;;
- vxworks*)
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
- ;;
- esac
- fi
-],
-[
- if test "$GCC" = yes; then
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
-
- case $host_os in
- aix*)
- # All AIX code is PIC.
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- m68k)
- # FIXME: we need at least 68020 code to build shared libraries, but
- # adding the `-m68020' flag to GCC prevents building anything better,
- # like `-m68040'.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
- ;;
- esac
- ;;
-
- beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
- # PIC is the default for these OSes.
- ;;
-
- mingw* | cygwin* | pw32* | os2* | cegcc*)
- # This hack is so that the source file can tell whether it is being
- # built for inclusion in a dll (and should export symbols for example).
- # Although the cygwin gcc ignores -fPIC, still need this for old-style
- # (--disable-auto-import) libraries
- m4_if([$1], [GCJ], [],
- [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
- ;;
-
- darwin* | rhapsody*)
- # PIC is the default on this platform
- # Common symbols not allowed in MH_DYLIB files
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
- ;;
-
- hpux*)
- # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
- # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
- # sets the default TLS model and affects inlining.
- case $host_cpu in
- hppa*64*)
- # +Z the default
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- esac
- ;;
-
- interix[[3-9]]*)
- # Interix 3.x gcc -fpic/-fPIC options generate broken code.
- # Instead, we relocate shared libraries at runtime.
- ;;
-
- msdosdjgpp*)
- # Just because we use GCC doesn't mean we suddenly get shared libraries
- # on systems that don't support them.
- _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
- enable_shared=no
- ;;
-
- *nto* | *qnx*)
-      # QNX uses GNU C++, but we need to define the -shared option too; otherwise
-      # it will core dump.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
- ;;
-
- sysv4*MP*)
- if test -d /usr/nec; then
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
- fi
- ;;
-
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- ;;
- esac
- else
- # PORTME Check for flag to pass linker flags through the system compiler.
- case $host_os in
- aix*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- if test "$host_cpu" = ia64; then
- # AIX 5 now supports IA64 processor
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- else
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
- fi
- ;;
-
- mingw* | cygwin* | pw32* | os2* | cegcc*)
- # This hack is so that the source file can tell whether it is being
- # built for inclusion in a dll (and should export symbols for example).
- m4_if([$1], [GCJ], [],
- [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
- ;;
-
- hpux9* | hpux10* | hpux11*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
- # not for PA HP-UX.
- case $host_cpu in
- hppa*64*|ia64*)
- # +Z the default
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
- ;;
- esac
- # Is there a better lt_prog_compiler_static that works with the bundled CC?
- _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
- ;;
-
- irix5* | irix6* | nonstopux*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- # PIC (with -KPIC) is the default.
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- ;;
-
- linux* | k*bsd*-gnu)
- case $cc_basename in
- # old Intel for x86_64 which still supported -KPIC.
- ecc*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
- ;;
- # icc used to be incompatible with GCC.
- # ICC 10 doesn't accept -KPIC any more.
- icc* | ifort*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
- ;;
- # Lahey Fortran 8.1.
- lf95*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
- ;;
- pgcc* | pgf77* | pgf90* | pgf95*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
- ccc*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- # All Alpha code is PIC.
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- ;;
- xl*)
- # IBM XL C 8.0/Fortran 10.1 on PPC
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C 5.9
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- ;;
- *Sun\ F*)
- # Sun Fortran 8.3 passes all unrecognized flags to the linker
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
- ;;
- esac
- ;;
- esac
- ;;
-
- newsos6)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
-
- *nto* | *qnx*)
-      # QNX uses GNU C++, but we need to define the -shared option too; otherwise
-      # it will core dump.
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
- ;;
-
- osf3* | osf4* | osf5*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- # All OSF/1 code is PIC.
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- ;;
-
- rdos*)
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
- ;;
-
- solaris*)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- case $cc_basename in
- f77* | f90* | f95*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
- *)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
- esac
- ;;
-
- sunos4*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
-
- sysv4 | sysv4.2uw2* | sysv4.3*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
-
- sysv4*MP*)
- if test -d /usr/nec ;then
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- fi
- ;;
-
- sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
-
- unicos*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
- _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
- ;;
-
- uts4*)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- ;;
-
- *)
- _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
- ;;
- esac
- fi
-])
-case $host_os in
- # For platforms which do not support PIC, -DPIC is meaningless:
- *djgpp*)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=
- ;;
- *)
- _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
- ;;
-esac
-AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
-_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
- [How to pass a linker flag through the compiler])
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
- _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
- [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
- [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
- [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
- "" | " "*) ;;
- *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
- esac],
- [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
- _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
-fi
-_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
- [Additional compiler flags for building library objects])
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
-_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
- _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
- $lt_tmp_static_flag,
- [],
- [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
-_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
- [Compiler flag to prevent dynamic linking])
-])# _LT_COMPILER_PIC
-
-
-# _LT_LINKER_SHLIBS([TAGNAME])
-# ----------------------------
-# See if the linker supports building shared libraries.
-m4_defun([_LT_LINKER_SHLIBS],
-[AC_REQUIRE([LT_PATH_LD])dnl
-AC_REQUIRE([LT_PATH_NM])dnl
-m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_DECL_EGREP])dnl
-m4_require([_LT_DECL_SED])dnl
-m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
-m4_require([_LT_TAG_COMPILER])dnl
-AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
-m4_if([$1], [CXX], [
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
- case $host_os in
- aix[[4-9]]*)
- # If we're using GNU nm, then we don't want the "-C" option.
- # -C means demangle to AIX nm, but means don't demangle with GNU nm
- if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- else
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- fi
- ;;
- pw32*)
- _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
- ;;
- cygwin* | mingw* | cegcc*)
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
- ;;
- *)
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
- ;;
- esac
- _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
-], [
- runpath_var=
- _LT_TAGVAR(allow_undefined_flag, $1)=
- _LT_TAGVAR(always_export_symbols, $1)=no
- _LT_TAGVAR(archive_cmds, $1)=
- _LT_TAGVAR(archive_expsym_cmds, $1)=
- _LT_TAGVAR(compiler_needs_object, $1)=no
- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
- _LT_TAGVAR(export_dynamic_flag_spec, $1)=
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
- _LT_TAGVAR(hardcode_automatic, $1)=no
- _LT_TAGVAR(hardcode_direct, $1)=no
- _LT_TAGVAR(hardcode_direct_absolute, $1)=no
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
- _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
- _LT_TAGVAR(hardcode_libdir_separator, $1)=
- _LT_TAGVAR(hardcode_minus_L, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
- _LT_TAGVAR(inherit_rpath, $1)=no
- _LT_TAGVAR(link_all_deplibs, $1)=unknown
- _LT_TAGVAR(module_cmds, $1)=
- _LT_TAGVAR(module_expsym_cmds, $1)=
- _LT_TAGVAR(old_archive_from_new_cmds, $1)=
- _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
- _LT_TAGVAR(thread_safe_flag_spec, $1)=
- _LT_TAGVAR(whole_archive_flag_spec, $1)=
- # include_expsyms should be a list of space-separated symbols to be *always*
- # included in the symbol list
- _LT_TAGVAR(include_expsyms, $1)=
- # exclude_expsyms can be an extended regexp of symbols to exclude
- # it will be wrapped by ` (' and `)$', so one must not match beginning or
- # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
- # as well as any symbol that contains `d'.
- _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
-  # Although _GLOBAL_OFFSET_TABLE_ is a valid C symbol name, most a.out
- # platforms (ab)use it in PIC code, but their linkers get confused if
- # the symbol is explicitly referenced. Since portable code cannot
- # rely on this symbol name, it's probably fine to never include it in
- # preloaded symbol tables.
- # Exclude shared library initialization/finalization symbols.
-dnl Note also adjust exclude_expsyms for C++ above.
- extract_expsyms_cmds=
-
- case $host_os in
- cygwin* | mingw* | pw32* | cegcc*)
- # FIXME: the MSVC++ port hasn't been tested in a loooong time
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- if test "$GCC" != yes; then
- with_gnu_ld=no
- fi
- ;;
- interix*)
- # we just hope/assume this is gcc and not c89 (= MSVC++)
- with_gnu_ld=yes
- ;;
- openbsd*)
- with_gnu_ld=no
- ;;
- esac
-
- _LT_TAGVAR(ld_shlibs, $1)=yes
- if test "$with_gnu_ld" = yes; then
- # If archive_cmds runs LD, not CC, wlarc should be empty
- wlarc='${wl}'
-
- # Set some defaults for GNU ld with shared library support. These
- # are reset later if shared libraries are not supported. Putting them
- # here allows them to be overridden if necessary.
- runpath_var=LD_RUN_PATH
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
-    # ancient GNU ld didn't support --whole-archive et al.
- if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
- _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
- else
- _LT_TAGVAR(whole_archive_flag_spec, $1)=
- fi
- supports_anon_versioning=no
- case `$LD -v 2>&1` in
- *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
- *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
- *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
- *\ 2.11.*) ;; # other 2.11 versions
- *) supports_anon_versioning=yes ;;
- esac
-
- # See if GNU ld supports shared libraries.
- case $host_os in
- aix[[3-9]]*)
- # On AIX/PPC, the GNU linker is very broken
- if test "$host_cpu" != ia64; then
- _LT_TAGVAR(ld_shlibs, $1)=no
- cat <<_LT_EOF 1>&2
-
-*** Warning: the GNU linker, at least up to release 2.9.1, is reported
-*** to be unable to reliably create shared libraries on AIX.
-*** Therefore, libtool is disabling shared libraries support. If you
-*** really care for shared libraries, you may want to modify your PATH
-*** so that a non-GNU linker is found, and then restart.
-
-_LT_EOF
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)=''
- ;;
- m68k)
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- ;;
- esac
- ;;
-
- beos*)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
- # support --undefined. This deserves some investigation. FIXME
- _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
- # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
- # as there is no search path for DLLs.
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- _LT_TAGVAR(always_export_symbols, $1)=no
- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- # If the export-symbols file already is a .def file (1st line
- # is EXPORTS), use it as is; otherwise, prepend...
- _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
- cp $export_symbols $output_objdir/$soname.def;
- else
- echo EXPORTS > $output_objdir/$soname.def;
- cat $export_symbols >> $output_objdir/$soname.def;
- fi~
- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- interix[[3-9]]*)
- _LT_TAGVAR(hardcode_direct, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
- # Instead, shared libraries are loaded at an image base (0x10000000 by
-      # default) and relocated if they conflict, which is a slow, very memory-
-      # consuming and fragmenting process. To avoid this, we pick a random,
- # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
- # time. Moving up from 0x10000000 also allows more sbrk(2) space.
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- ;;
-
- gnu* | linux* | tpf* | k*bsd*-gnu)
- tmp_diet=no
- if test "$host_os" = linux-dietlibc; then
- case $cc_basename in
- diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn)
- esac
- fi
- if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
- && test "$tmp_diet" = no
- then
- tmp_addflag=
- tmp_sharedflag='-shared'
- case $cc_basename,$host_cpu in
- pgcc*) # Portland Group C compiler
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- tmp_addflag=' $pic_flag'
- ;;
- pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- tmp_addflag=' $pic_flag -Mnomain' ;;
- ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64
- tmp_addflag=' -i_dynamic' ;;
- efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64
- tmp_addflag=' -i_dynamic -nofor_main' ;;
- ifc* | ifort*) # Intel Fortran compiler
- tmp_addflag=' -nofor_main' ;;
- lf95*) # Lahey Fortran 8.1
- _LT_TAGVAR(whole_archive_flag_spec, $1)=
- tmp_sharedflag='--shared' ;;
- xl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
- tmp_sharedflag='-qmkshrobj'
- tmp_addflag= ;;
- esac
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*) # Sun C 5.9
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- _LT_TAGVAR(compiler_needs_object, $1)=yes
- tmp_sharedflag='-G' ;;
- *Sun\ F*) # Sun Fortran 8.3
- tmp_sharedflag='-G' ;;
- esac
- _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-
- if test "x$supports_anon_versioning" = xyes; then
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
- fi
-
- case $cc_basename in
- xlf*)
- # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
- _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
- _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir'
- _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
- solaris*)
- if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
- _LT_TAGVAR(ld_shlibs, $1)=no
- cat <<_LT_EOF 1>&2
-
-*** Warning: The releases 2.8.* of the GNU linker cannot reliably
-*** create shared libraries on Solaris systems. Therefore, libtool
-*** is disabling shared libraries support. We urge you to upgrade GNU
-*** binutils to release 2.9.1 or newer. Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
- case `$LD -v 2>&1` in
- *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
- _LT_TAGVAR(ld_shlibs, $1)=no
- cat <<_LT_EOF 1>&2
-
-*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
-*** reliably create shared libraries on SCO systems. Therefore, libtool
-*** is disabling shared libraries support. We urge you to upgrade GNU
-*** binutils to release 2.16.91.0.3 or newer. Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
- ;;
- *)
- # For security reasons, it is highly recommended that you always
- # use absolute paths for naming shared libraries, and exclude the
- # DT_RUNPATH tag from executables and libraries. But doing so
- # requires that you compile everything twice, which is a pain.
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
- esac
- ;;
-
- sunos4*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
- wlarc=
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
- esac
-
- if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then
- runpath_var=
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
- _LT_TAGVAR(export_dynamic_flag_spec, $1)=
- _LT_TAGVAR(whole_archive_flag_spec, $1)=
- fi
- else
- # PORTME fill in a description of your system's linker (not GNU ld)
- case $host_os in
- aix3*)
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- _LT_TAGVAR(always_export_symbols, $1)=yes
- _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
- # Note: this linker hardcodes the directories in LIBPATH if there
- # are no directories specified by -L.
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
- # Neither direct hardcoding nor static linking is supported with a
- # broken collect2.
- _LT_TAGVAR(hardcode_direct, $1)=unsupported
- fi
- ;;
-
- aix[[4-9]]*)
- if test "$host_cpu" = ia64; then
- # On IA64, the linker does run time linking by default, so we don't
- # have to do anything special.
- aix_use_runtimelinking=no
- exp_sym_flag='-Bexport'
- no_entry_flag=""
- else
- # If we're using GNU nm, then we don't want the "-C" option.
- # -C means demangle to AIX nm, but means don't demangle with GNU nm
- if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- else
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
- fi
- aix_use_runtimelinking=no
-
- # Test if we are trying to use run time linking or normal
- # AIX style linking. If -brtl is somewhere in LDFLAGS, we
- # need to do runtime linking.
- case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
- for ld_flag in $LDFLAGS; do
- if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
- aix_use_runtimelinking=yes
- break
- fi
- done
- ;;
- esac
-
- exp_sym_flag='-bexport'
- no_entry_flag='-bnoentry'
- fi
-
- # When large executables or shared objects are built, AIX ld can
- # have problems creating the table of contents. If linking a library
- # or program results in "error TOC overflow" add -mminimal-toc to
- # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
- # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
- _LT_TAGVAR(archive_cmds, $1)=''
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
-
- if test "$GCC" = yes; then
- case $host_os in aix4.[[012]]|aix4.[[012]].*)
- # We only want to do this on AIX 4.2 and lower, the check
- # below for broken collect2 doesn't work under 4.3+
- collect2name=`${CC} -print-prog-name=collect2`
- if test -f "$collect2name" &&
- strings "$collect2name" | $GREP resolve_lib_name >/dev/null
- then
- # We have reworked collect2
- :
- else
- # We have old collect2
- _LT_TAGVAR(hardcode_direct, $1)=unsupported
- # It fails to find uninstalled libraries when the uninstalled
- # path is not listed in the libpath. Setting hardcode_minus_L
- # to unsupported forces relinking
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=
- fi
- ;;
- esac
- shared_flag='-shared'
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag="$shared_flag "'${wl}-G'
- fi
- else
- # not using gcc
- if test "$host_cpu" = ia64; then
- # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
- # chokes on -Wl,-G. The following line is correct:
- shared_flag='-G'
- else
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag='${wl}-G'
- else
- shared_flag='${wl}-bM:SRE'
- fi
- fi
- fi
-
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
- # It seems that -bexpall does not export symbols beginning with
- # underscore (_), so it is better to generate a list of symbols to export.
- _LT_TAGVAR(always_export_symbols, $1)=yes
- if test "$aix_use_runtimelinking" = yes; then
- # Warning - without using the other runtime loading flags (-brtl),
- # -berok will link without error, but may produce a broken library.
- _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
- _LT_SYS_MODULE_PATH_AIX
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
- else
- if test "$host_cpu" = ia64; then
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
- _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
- _LT_SYS_MODULE_PATH_AIX
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
- # -berok will link without error, but may produce a broken library.
- _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
- # Exported symbols can be pulled into shared objects from archives
- _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
- _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
- # This is similar to how AIX traditionally builds its shared libraries.
- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
- fi
- fi
- ;;
-
- amigaos*)
- case $host_cpu in
- powerpc)
- # see comment about AmigaOS4 .so support
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)=''
- ;;
- m68k)
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- ;;
- esac
- ;;
-
- bsdi[[45]]*)
- _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- # Tell ltmain to make .lib files, not .a files.
- libext=lib
- # Tell ltmain to make .dll files, not .so files.
- shrext_cmds=".dll"
- # FIXME: Setting linknames here is a bad hack.
- _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `$ECHO "X$deplibs" | $Xsed -e '\''s/ -lc$//'\''` -link -dll~linknames='
- # The linker will automatically build a .lib file if we build a DLL.
- _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
- # FIXME: Should let the user specify the lib program.
- _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
- _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`'
- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
- ;;
-
- darwin* | rhapsody*)
- _LT_DARWIN_LINKER_FEATURES($1)
- ;;
-
- dgux*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- freebsd1*)
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
- # support. Future versions do this automatically, but an explicit c++rt0.o
- # does not break anything, and helps significantly (at the cost of a little
- # extra space).
- freebsd2.2*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- # Unfortunately, older versions of FreeBSD 2 do not have this feature.
- freebsd2*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- hpux9*)
- if test "$GCC" = yes; then
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- _LT_TAGVAR(hardcode_direct, $1)=yes
-
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- ;;
-
- hpux10*)
- if test "$GCC" = yes -a "$with_gnu_ld" = no; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
- if test "$with_gnu_ld" = no; then
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='+b $libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- fi
- ;;
-
- hpux11*)
- if test "$GCC" = yes -a "$with_gnu_ld" = no; then
- case $host_cpu in
- hppa*64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
- case $host_cpu in
- hppa*64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- fi
- if test "$with_gnu_ld" = no; then
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
-
- case $host_cpu in
- hppa*64*|ia64*)
- _LT_TAGVAR(hardcode_direct, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
- *)
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
-
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- ;;
- esac
- fi
- ;;
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
- save_LDFLAGS="$LDFLAGS"
- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
- AC_LINK_IFELSE(int foo(void) {},
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
- )
- LDFLAGS="$save_LDFLAGS"
- else
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
- fi
- _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- _LT_TAGVAR(inherit_rpath, $1)=yes
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- ;;
-
- netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out
- else
- _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF
- fi
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- newsos6)
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- *nto* | *qnx*)
- ;;
-
- openbsd*)
- if test -f /usr/libexec/ld.so; then
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- else
- case $host_os in
- openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- ;;
- esac
- fi
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- os2*)
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$ECHO DATA >> $output_objdir/$libname.def~$ECHO " SINGLE NONSHARED" >> $output_objdir/$libname.def~$ECHO EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
- _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
- ;;
-
- osf3*)
- if test "$GCC" = yes; then
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- else
- _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- fi
- _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- ;;
-
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- else
- _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
- $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
-
- # Both c and cxx compiler support -rpath directly
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
- fi
- _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- ;;
-
- solaris*)
- _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
- wlarc=''
- _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
- ;;
- *)
- wlarc='${wl}'
- _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- ;;
- esac
- fi
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- case $host_os in
- solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
- *)
- # The compiler driver will combine and reorder linker options,
- # but understands `-z linker_flag'. GCC discards it without `$wl',
- # but is careful enough not to reorder.
- # Supported since Solaris 2.6 (maybe 2.5.1?)
- if test "$GCC" = yes; then
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
- else
- _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
- fi
- ;;
- esac
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- ;;
-
- sunos4*)
- if test "x$host_vendor" = xsequent; then
- # Use $CC to link under sequent, because it throws in some extra .o
- # files that make .init and .fini sections work.
- _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
- else
- _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
- fi
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- sysv4)
- case $host_vendor in
- sni)
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
- ;;
- siemens)
-      ## LD is ld; it makes a PLAMLIB
- ## CC just makes a GrossModule.
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
- _LT_TAGVAR(hardcode_direct, $1)=no
- ;;
- motorola)
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      _LT_TAGVAR(hardcode_direct, $1)=no # Motorola manual says yes, but my tests say they lie
- ;;
- esac
- runpath_var='LD_RUN_PATH'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- sysv4.3*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
- ;;
-
- sysv4*MP*)
- if test -d /usr/nec; then
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- runpath_var=LD_RUN_PATH
- hardcode_runpath_var=yes
- _LT_TAGVAR(ld_shlibs, $1)=yes
- fi
- ;;
-
- sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
- _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- runpath_var='LD_RUN_PATH'
-
- if test "$GCC" = yes; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- else
- _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- fi
- ;;
-
- sysv5* | sco3.2v5* | sco5v6*)
- # Note: We can NOT use -z defs as we might desire, because we do not
- # link with -lc, and that would cause any symbols used from libc to
- # always be unresolved, which means just about no library would
- # ever link correctly. If we're not using GNU ld we use -z text
- # though, which does catch some bad symbols but isn't as heavy-handed
- # as -z defs.
- _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
- _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
- runpath_var='LD_RUN_PATH'
-
- if test "$GCC" = yes; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- else
- _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- fi
- ;;
-
- uts4*)
- _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
-
- *)
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
-
- if test x$host_vendor = xsni; then
- case $host in
- sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym'
- ;;
- esac
- fi
- fi
-])
-AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
-test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
-
-_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
-
-_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
-_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
-_LT_DECL([], [extract_expsyms_cmds], [2],
- [The commands to extract the exported symbol list from a shared archive])
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
-x|xyes)
- # Assume -lc should be added
- _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
-
- if test "$enable_shared" = yes && test "$GCC" = yes; then
- case $_LT_TAGVAR(archive_cmds, $1) in
- *'~'*)
- # FIXME: we may have to deal with multi-command sequences.
- ;;
- '$CC '*)
- # Test whether the compiler implicitly links with -lc since on some
- # systems, -lgcc has to come before -lc. If gcc already passes -lc
- # to ld, don't add -lc before -lgcc.
- AC_MSG_CHECKING([whether -lc should be explicitly linked in])
- $RM conftest*
- echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
- if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
- soname=conftest
- lib=conftest
- libobjs=conftest.$ac_objext
- deplibs=
- wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
- pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
- compiler_flags=-v
- linker_flags=-v
- verstring=
- output_objdir=.
- libname=conftest
- lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
- _LT_TAGVAR(allow_undefined_flag, $1)=
- if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
- then
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- else
- _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
- fi
- _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
- else
- cat conftest.err 1>&5
- fi
- $RM conftest*
- AC_MSG_RESULT([$_LT_TAGVAR(archive_cmds_need_lc, $1)])
- ;;
- esac
- fi
- ;;
-esac
-
-_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
- [Whether or not to add -lc for building shared libraries])
-_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
- [enable_shared_with_static_runtimes], [0],
- [Whether or not to disallow shared libs when runtime libs are static])
-_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
- [Compiler flag to allow reflexive dlopens])
-_LT_TAGDECL([], [whole_archive_flag_spec], [1],
- [Compiler flag to generate shared objects directly from archives])
-_LT_TAGDECL([], [compiler_needs_object], [1],
- [Whether the compiler copes with passing no objects directly])
-_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
- [Create an old-style archive from a shared archive])
-_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
- [Create a temporary old-style archive to link instead of a shared archive])
-_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
-_LT_TAGDECL([], [archive_expsym_cmds], [2])
-_LT_TAGDECL([], [module_cmds], [2],
- [Commands used to build a loadable module if different from building
- a shared archive.])
-_LT_TAGDECL([], [module_expsym_cmds], [2])
-_LT_TAGDECL([], [with_gnu_ld], [1],
- [Whether we are building with GNU ld or not])
-_LT_TAGDECL([], [allow_undefined_flag], [1],
- [Flag that allows shared libraries with undefined symbols to be built])
-_LT_TAGDECL([], [no_undefined_flag], [1],
- [Flag that enforces no undefined symbols])
-_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
- [Flag to hardcode $libdir into a binary during linking.
- This must work even if $libdir does not exist])
-_LT_TAGDECL([], [hardcode_libdir_flag_spec_ld], [1],
- [[If ld is used when linking, flag to hardcode $libdir into a binary
- during linking. This must work even if $libdir does not exist]])
-_LT_TAGDECL([], [hardcode_libdir_separator], [1],
- [Whether we need a single "-rpath" flag with a separated argument])
-_LT_TAGDECL([], [hardcode_direct], [0],
- [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
- DIR into the resulting binary])
-_LT_TAGDECL([], [hardcode_direct_absolute], [0],
- [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
- DIR into the resulting binary and the resulting library dependency is
- "absolute", i.e impossible to change by setting ${shlibpath_var} if the
- library is relocated])
-_LT_TAGDECL([], [hardcode_minus_L], [0],
- [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
- into the resulting binary])
-_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
- [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
- into the resulting binary])
-_LT_TAGDECL([], [hardcode_automatic], [0],
- [Set to "yes" if building a shared library automatically hardcodes DIR
- into the library and all subsequent libraries and executables linked
- against it])
-_LT_TAGDECL([], [inherit_rpath], [0],
- [Set to yes if linker adds runtime paths of dependent libraries
- to runtime path list])
-_LT_TAGDECL([], [link_all_deplibs], [0],
- [Whether libtool must link a program against all its dependency libraries])
-_LT_TAGDECL([], [fix_srcfile_path], [1],
- [Fix the shell variable $srcfile for the compiler])
-_LT_TAGDECL([], [always_export_symbols], [0],
- [Set to "yes" if exported symbols are required])
-_LT_TAGDECL([], [export_symbols_cmds], [2],
- [The commands to list exported symbols])
-_LT_TAGDECL([], [exclude_expsyms], [1],
- [Symbols that should not be listed in the preloaded symbols])
-_LT_TAGDECL([], [include_expsyms], [1],
- [Symbols that must always be exported])
-_LT_TAGDECL([], [prelink_cmds], [2],
- [Commands necessary for linking programs (against libraries) with templates])
-_LT_TAGDECL([], [file_list_spec], [1],
- [Specify filename containing input files])
-dnl FIXME: Not yet implemented
-dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
-dnl [Compiler flag to generate thread safe objects])
-])# _LT_LINKER_SHLIBS
-
-
-# _LT_LANG_C_CONFIG([TAG])
-# ------------------------
-# Ensure that the configuration variables for a C compiler are suitably
-# defined. These variables are subsequently used by _LT_CONFIG to write
-# the compiler configuration to `libtool'.
-m4_defun([_LT_LANG_C_CONFIG],
-[m4_require([_LT_DECL_EGREP])dnl
-lt_save_CC="$CC"
-AC_LANG_PUSH(C)
-
-# Source file extension for C test sources.
-ac_ext=c
-
-# Object file extension for compiled C test sources.
-objext=o
-_LT_TAGVAR(objext, $1)=$objext
-
-# Code to be used in simple compile tests
-lt_simple_compile_test_code="int some_variable = 0;"
-
-# Code to be used in simple link tests
-lt_simple_link_test_code='int main(){return(0);}'
-
-_LT_TAG_COMPILER
-# Save the default compiler, since it gets overwritten when the other
-# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
-compiler_DEFAULT=$CC
-
-# save warnings/boilerplate of simple test code
-_LT_COMPILER_BOILERPLATE
-_LT_LINKER_BOILERPLATE
-
-## CAVEAT EMPTOR:
-## There is no encapsulation within the following macros, do not change
-## the running order or otherwise move them around unless you know exactly
-## what you are doing...
-if test -n "$compiler"; then
- _LT_COMPILER_NO_RTTI($1)
- _LT_COMPILER_PIC($1)
- _LT_COMPILER_C_O($1)
- _LT_COMPILER_FILE_LOCKS($1)
- _LT_LINKER_SHLIBS($1)
- _LT_SYS_DYNAMIC_LINKER($1)
- _LT_LINKER_HARDCODE_LIBPATH($1)
- LT_SYS_DLOPEN_SELF
- _LT_CMD_STRIPLIB
-
- # Report which library types will actually be built
- AC_MSG_CHECKING([if libtool supports shared libraries])
- AC_MSG_RESULT([$can_build_shared])
-
- AC_MSG_CHECKING([whether to build shared libraries])
- test "$can_build_shared" = "no" && enable_shared=no
-
- # On AIX, shared libraries and static libraries use the same namespace, and
- # are all built from PIC.
- case $host_os in
- aix3*)
- test "$enable_shared" = yes && enable_static=no
- if test -n "$RANLIB"; then
- archive_cmds="$archive_cmds~\$RANLIB \$lib"
- postinstall_cmds='$RANLIB $lib'
- fi
- ;;
-
- aix[[4-9]]*)
- if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
- test "$enable_shared" = yes && enable_static=no
- fi
- ;;
- esac
- AC_MSG_RESULT([$enable_shared])
-
- AC_MSG_CHECKING([whether to build static libraries])
- # Make sure either enable_shared or enable_static is yes.
- test "$enable_shared" = yes || enable_static=yes
- AC_MSG_RESULT([$enable_static])
-
- _LT_CONFIG($1)
-fi
-AC_LANG_POP
-CC="$lt_save_CC"
-])# _LT_LANG_C_CONFIG
-
-
-# _LT_PROG_CXX
-# ------------
-# Since AC_PROG_CXX is broken, in that it returns g++ even if there is no
-# C++ compiler, we have our own version here.
-m4_defun([_LT_PROG_CXX],
-[
-pushdef([AC_MSG_ERROR], [_lt_caught_CXX_error=yes])
-AC_PROG_CXX
-if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
- ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
- (test "X$CXX" != "Xg++"))) ; then
- AC_PROG_CXXCPP
-else
- _lt_caught_CXX_error=yes
-fi
-popdef([AC_MSG_ERROR])
-])# _LT_PROG_CXX
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([_LT_PROG_CXX], [])
-
-
-# _LT_LANG_CXX_CONFIG([TAG])
-# --------------------------
-# Ensure that the configuration variables for a C++ compiler are suitably
-# defined. These variables are subsequently used by _LT_CONFIG to write
-# the compiler configuration to `libtool'.
-m4_defun([_LT_LANG_CXX_CONFIG],
-[AC_REQUIRE([_LT_PROG_CXX])dnl
-m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-m4_require([_LT_DECL_EGREP])dnl
-
-AC_LANG_PUSH(C++)
-_LT_TAGVAR(archive_cmds_need_lc, $1)=no
-_LT_TAGVAR(allow_undefined_flag, $1)=
-_LT_TAGVAR(always_export_symbols, $1)=no
-_LT_TAGVAR(archive_expsym_cmds, $1)=
-_LT_TAGVAR(compiler_needs_object, $1)=no
-_LT_TAGVAR(export_dynamic_flag_spec, $1)=
-_LT_TAGVAR(hardcode_direct, $1)=no
-_LT_TAGVAR(hardcode_direct_absolute, $1)=no
-_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
-_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
-_LT_TAGVAR(hardcode_libdir_separator, $1)=
-_LT_TAGVAR(hardcode_minus_L, $1)=no
-_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
-_LT_TAGVAR(hardcode_automatic, $1)=no
-_LT_TAGVAR(inherit_rpath, $1)=no
-_LT_TAGVAR(module_cmds, $1)=
-_LT_TAGVAR(module_expsym_cmds, $1)=
-_LT_TAGVAR(link_all_deplibs, $1)=unknown
-_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
-_LT_TAGVAR(no_undefined_flag, $1)=
-_LT_TAGVAR(whole_archive_flag_spec, $1)=
-_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
-
-# Source file extension for C++ test sources.
-ac_ext=cpp
-
-# Object file extension for compiled C++ test sources.
-objext=o
-_LT_TAGVAR(objext, $1)=$objext
-
-# No sense in running all these tests if we already determined that
-# the CXX compiler isn't working. Some variables (like enable_shared)
-# are currently assumed to apply to all compilers on this platform,
-# and will be corrupted by setting them based on a non-working compiler.
-if test "$_lt_caught_CXX_error" != yes; then
- # Code to be used in simple compile tests
- lt_simple_compile_test_code="int some_variable = 0;"
-
- # Code to be used in simple link tests
- lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
-
- # ltmain only uses $CC for tagged configurations so make sure $CC is set.
- _LT_TAG_COMPILER
-
- # save warnings/boilerplate of simple test code
- _LT_COMPILER_BOILERPLATE
- _LT_LINKER_BOILERPLATE
-
- # Allow CC to be a program name with arguments.
- lt_save_CC=$CC
- lt_save_LD=$LD
- lt_save_GCC=$GCC
- GCC=$GXX
- lt_save_with_gnu_ld=$with_gnu_ld
- lt_save_path_LD=$lt_cv_path_LD
- if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
- lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
- else
- $as_unset lt_cv_prog_gnu_ld
- fi
- if test -n "${lt_cv_path_LDCXX+set}"; then
- lt_cv_path_LD=$lt_cv_path_LDCXX
- else
- $as_unset lt_cv_path_LD
- fi
- test -z "${LDCXX+set}" || LD=$LDCXX
- CC=${CXX-"c++"}
- compiler=$CC
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_CC_BASENAME([$compiler])
-
- if test -n "$compiler"; then
- # We don't want -fno-exception when compiling C++ code, so set the
- # no_builtin_flag separately
- if test "$GXX" = yes; then
- _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
- else
- _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
- fi
-
- if test "$GXX" = yes; then
- # Set up default GNU C++ configuration
-
- LT_PATH_LD
-
- # Check if GNU C++ uses GNU ld as the underlying linker, since the
- # archiving commands below assume that GNU ld is being used.
- if test "$with_gnu_ld" = yes; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
-
- # If archive_cmds runs LD, not CC, wlarc should be empty
- # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
- # investigate it a little bit more. (MM)
- wlarc='${wl}'
-
- # ancient GNU ld didn't support --whole-archive et al.
- if eval "`$CC -print-prog-name=ld` --help 2>&1" |
- $GREP 'no-whole-archive' > /dev/null; then
- _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
- else
- _LT_TAGVAR(whole_archive_flag_spec, $1)=
- fi
- else
- with_gnu_ld=no
- wlarc=
-
- # A generic and very simple default shared library creation
- # command for GNU C++ for the case where it uses the native
- # linker, instead of GNU ld. If possible, this setting should
- # be overridden to take advantage of the native linker features
- # on the platform it is being used on.
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
- fi
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
-
- else
- GXX=no
- with_gnu_ld=no
- wlarc=
- fi
-
- # PORTME: fill in a description of your system's C++ link characteristics
- AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
- _LT_TAGVAR(ld_shlibs, $1)=yes
- case $host_os in
- aix3*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- aix[[4-9]]*)
- if test "$host_cpu" = ia64; then
- # On IA64, the linker does run time linking by default, so we don't
- # have to do anything special.
- aix_use_runtimelinking=no
- exp_sym_flag='-Bexport'
- no_entry_flag=""
- else
- aix_use_runtimelinking=no
-
- # Test if we are trying to use run time linking or normal
- # AIX style linking. If -brtl is somewhere in LDFLAGS, we
- # need to do runtime linking.
- case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
- for ld_flag in $LDFLAGS; do
- case $ld_flag in
- *-brtl*)
- aix_use_runtimelinking=yes
- break
- ;;
- esac
- done
- ;;
- esac
-
- exp_sym_flag='-bexport'
- no_entry_flag='-bnoentry'
- fi
-
- # When large executables or shared objects are built, AIX ld can
- # have problems creating the table of contents. If linking a library
- # or program results in "error TOC overflow" add -mminimal-toc to
- # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
- # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
- _LT_TAGVAR(archive_cmds, $1)=''
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
-
- if test "$GXX" = yes; then
- case $host_os in aix4.[[012]]|aix4.[[012]].*)
- # We only want to do this on AIX 4.2 and lower; the check
- # below for broken collect2 doesn't work under 4.3+
- collect2name=`${CC} -print-prog-name=collect2`
- if test -f "$collect2name" &&
- strings "$collect2name" | $GREP resolve_lib_name >/dev/null
- then
- # We have reworked collect2
- :
- else
- # We have old collect2
- _LT_TAGVAR(hardcode_direct, $1)=unsupported
- # It fails to find uninstalled libraries when the uninstalled
- # path is not listed in the libpath. Setting hardcode_minus_L
- # to unsupported forces relinking
- _LT_TAGVAR(hardcode_minus_L, $1)=yes
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=
- fi
- esac
- shared_flag='-shared'
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag="$shared_flag "'${wl}-G'
- fi
- else
- # not using gcc
- if test "$host_cpu" = ia64; then
- # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
- # chokes on -Wl,-G. The following line is correct:
- shared_flag='-G'
- else
- if test "$aix_use_runtimelinking" = yes; then
- shared_flag='${wl}-G'
- else
- shared_flag='${wl}-bM:SRE'
- fi
- fi
- fi
-
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
- # It seems that -bexpall does not export symbols beginning with
- # underscore (_), so it is better to generate a list of symbols to
- # export.
- _LT_TAGVAR(always_export_symbols, $1)=yes
- if test "$aix_use_runtimelinking" = yes; then
- # Warning - without using the other runtime loading flags (-brtl),
- # -berok will link without error, but may produce a broken library.
- _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
- # Determine the default libpath from the value encoded in an empty
- # executable.
- _LT_SYS_MODULE_PATH_AIX
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
-
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
- else
- if test "$host_cpu" = ia64; then
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
- _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
- _LT_SYS_MODULE_PATH_AIX
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
- # -berok will link without error, but may produce a broken library.
- _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
- # Exported symbols can be pulled into shared objects from archives
- _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
- _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
- # This is similar to how AIX traditionally builds its shared
- # libraries.
- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
- fi
- fi
- ;;
-
- beos*)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
- # support --undefined. This deserves some investigation. FIXME
- _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- chorus*)
- case $cc_basename in
- *)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
- # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
- # as there is no search path for DLLs.
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- _LT_TAGVAR(always_export_symbols, $1)=no
- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- # If the export-symbols file already is a .def file (1st line
- # is EXPORTS), use it as is; otherwise, prepend...
- _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
- cp $export_symbols $output_objdir/$soname.def;
- else
- echo EXPORTS > $output_objdir/$soname.def;
- cat $export_symbols >> $output_objdir/$soname.def;
- fi~
- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
- darwin* | rhapsody*)
- _LT_DARWIN_LINKER_FEATURES($1)
- ;;
-
- dgux*)
- case $cc_basename in
- ec++*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- ghcx*)
- # Green Hills C++ Compiler
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
- ;;
-
- freebsd[[12]]*)
- # C++ shared libraries reported to be fairly broken before
- # switch to ELF
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- freebsd-elf*)
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- ;;
-
- freebsd* | dragonfly*)
- # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
- # conventions
- _LT_TAGVAR(ld_shlibs, $1)=yes
- ;;
-
- gnu*)
- ;;
-
- hpux9*)
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
- # but as the default
- # location of the library.
-
- case $cc_basename in
- CC*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- aCC*)
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- *)
- if test "$GXX" = yes; then
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
- esac
- ;;
-
- hpux10*|hpux11*)
- if test $with_gnu_ld = no; then
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
-
- case $host_cpu in
- hppa*64*|ia64*)
- ;;
- *)
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- ;;
- esac
- fi
- case $host_cpu in
- hppa*64*|ia64*)
- _LT_TAGVAR(hardcode_direct, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- ;;
- *)
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
- # but as the default
- # location of the library.
- ;;
- esac
-
- case $cc_basename in
- CC*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- aCC*)
- case $host_cpu in
- hppa*64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- ia64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- esac
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- *)
- if test "$GXX" = yes; then
- if test $with_gnu_ld = no; then
- case $host_cpu in
- hppa*64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- ia64*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- esac
- fi
- else
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
- esac
- ;;
-
- interix[[3-9]]*)
- _LT_TAGVAR(hardcode_direct, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
- # Instead, shared libraries are loaded at an image base (0x10000000 by
- # default) and relocated if they conflict, which is a slow, very
- # memory-consuming and fragmenting process. To avoid this, we pick a random,
- # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
- # time. Moving up from 0x10000000 also allows more sbrk(2) space.
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- ;;
- irix5* | irix6*)
- case $cc_basename in
- CC*)
- # SGI C++
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
-
- # Archives containing C++ object files must be created using
- # "CC -ar", where "CC" is the IRIX C++ compiler. This is
- # necessary to make sure instantiated templates are included
- # in the archive.
- _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
- ;;
- *)
- if test "$GXX" = yes; then
- if test "$with_gnu_ld" = no; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- else
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` -o $lib'
- fi
- fi
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- ;;
- esac
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
- _LT_TAGVAR(inherit_rpath, $1)=yes
- ;;
-
- linux* | k*bsd*-gnu)
- case $cc_basename in
- KCC*)
- # Kuck and Associates, Inc. (KAI) C++ Compiler
-
- # KCC will only create a shared library if the output file
- # ends with ".so" (or ".sl" for HP-UX), so rename the library
- # to its proper name (with version) after linking.
- _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
-
- # Archives containing C++ object files must be created using
- # "CC -Bstatic", where "CC" is the KAI C++ compiler.
- _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
- ;;
- icpc* | ecpc* )
- # Intel C++
- with_gnu_ld=yes
- # Version 8.0 and above of icpc choke on multiply-defined symbols
- # if we add $predep_objects and $postdep_objects; however, 7.1 and
- # earlier do not add the objects themselves.
- case `$CC -V 2>&1` in
- *"Version 7."*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- ;;
- *) # Version 8.0 or newer
- tmp_idyn=
- case $host_cpu in
- ia64*) tmp_idyn=' -i_dynamic';;
- esac
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- ;;
- esac
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
- ;;
- pgCC* | pgcpp*)
- # Portland Group C++ compiler
- case `$CC -V` in
- *pgCC\ [[1-5]]* | *pgcpp\ [[1-5]]*)
- _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"'
- _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~
- $RANLIB $oldlib'
- _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
- ;;
- *) # Version 6 will use weak symbols
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
- ;;
- esac
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- ;;
- cxx*)
- # Compaq C++
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
-
- runpath_var=LD_RUN_PATH
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- xl*)
- # IBM XL 8.0 on PPC, with GNU ld
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
- _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
- $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
- fi
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C++ 5.9
- _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
- _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive'
- _LT_TAGVAR(compiler_needs_object, $1)=yes
-
- # Not sure whether something based on
- # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
- # would be better.
- output_verbose_link_cmd='echo'
-
- # Archives containing C++ object files must be created using
- # "CC -xar", where "CC" is the Sun C++ compiler. This is
- # necessary to make sure instantiated templates are included
- # in the archive.
- _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
- ;;
- esac
- ;;
- esac
- ;;
-
- lynxos*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- m88k*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- mvs*)
- case $cc_basename in
- cxx*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
- ;;
-
- netbsd*)
- if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
- wlarc=
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- fi
- # Workaround some broken pre-1.5 toolchains
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
- ;;
-
- *nto* | *qnx*)
- _LT_TAGVAR(ld_shlibs, $1)=yes
- ;;
-
- openbsd2*)
- # C++ shared libraries are fairly broken
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- openbsd*)
- if test -f /usr/libexec/ld.so; then
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
- _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
- fi
- output_verbose_link_cmd=echo
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
-
- osf3* | osf4* | osf5*)
- case $cc_basename in
- KCC*)
- # Kuck and Associates, Inc. (KAI) C++ Compiler
-
- # KCC will only create a shared library if the output file
- # ends with ".so" (or ".sl" for HP-UX), so rename the library
- # to its proper name (with version) after linking.
- _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
-
- # Archives containing C++ object files must be created using
- # the KAI C++ compiler.
- case $host in
- osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
- *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
- esac
- ;;
- RCC*)
- # Rational C++ 2.4.1
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- cxx*)
- case $host in
- osf3*)
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && $ECHO "X${wl}-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- ;;
- *)
- _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
- echo "-hidden">> $lib.exp~
- $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~
- $RM $lib.exp'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
- ;;
- esac
-
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- #
- # There doesn't appear to be a way to prevent this compiler from
- # explicitly linking system object files so we need to strip them
- # from the output so that they don't get included in the library
- # dependencies.
- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed'
- ;;
- *)
- if test "$GXX" = yes && test "$with_gnu_ld" = no; then
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
- case $host in
- osf3*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- esac
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=:
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
-
- else
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
- ;;
- esac
- ;;
-
- psos*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- sunos4*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.x
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- lcc*)
- # Lucid
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
- ;;
-
- solaris*)
- case $cc_basename in
- CC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
- _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
- _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- case $host_os in
- solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
- *)
- # The compiler driver will combine and reorder linker options,
- # but understands `-z linker_flag'.
- # Supported since Solaris 2.6 (maybe 2.5.1?)
- _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
- ;;
- esac
- _LT_TAGVAR(link_all_deplibs, $1)=yes
-
- output_verbose_link_cmd='echo'
-
- # Archives containing C++ object files must be created using
- # "CC -xar", where "CC" is the Sun C++ compiler. This is
- # necessary to make sure instantiated templates are included
- # in the archive.
- _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
- ;;
- gcx*)
- # Green Hills C++ Compiler
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
-
- # The C++ compiler must be used to create the archive.
- _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
- ;;
- *)
- # GNU C++ compiler with Solaris linker
- if test "$GXX" = yes && test "$with_gnu_ld" = no; then
- _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
- if $CC --version | $GREP -v '^2\.7' > /dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
- else
- # g++ 2.7 appears to require `-G' NOT `-shared' on this
- # platform.
- _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
- $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
- # linking a shared library.
- output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"'
- fi
-
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir'
- case $host_os in
- solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
- *)
- _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
- ;;
- esac
- fi
- ;;
- esac
- ;;
-
- sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
- _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- runpath_var='LD_RUN_PATH'
-
- case $cc_basename in
- CC*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- ;;
-
- sysv5* | sco3.2v5* | sco5v6*)
- # Note: We can NOT use -z defs as we might desire, because we do not
- # link with -lc, and that would cause any symbols used from libc to
- # always be unresolved, which means just about no library would
- # ever link correctly. If we're not using GNU ld we use -z text
- # though, which does catch some bad symbols but isn't as heavy-handed
- # as -z defs.
- _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
- _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
- _LT_TAGVAR(archive_cmds_need_lc, $1)=no
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
- _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
- _LT_TAGVAR(link_all_deplibs, $1)=yes
- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
- runpath_var='LD_RUN_PATH'
-
- case $cc_basename in
- CC*)
- _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- ;;
-
- tandem*)
- case $cc_basename in
- NCC*)
- # NonStop-UX NCC 3.20
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- *)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
- ;;
-
- vxworks*)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
-
- *)
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
- ;;
- esac
-
- AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
- test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
-
- _LT_TAGVAR(GCC, $1)="$GXX"
- _LT_TAGVAR(LD, $1)="$LD"
-
- ## CAVEAT EMPTOR:
- ## There is no encapsulation within the following macros, do not change
- ## the running order or otherwise move them around unless you know exactly
- ## what you are doing...
- _LT_SYS_HIDDEN_LIBDEPS($1)
- _LT_COMPILER_PIC($1)
- _LT_COMPILER_C_O($1)
- _LT_COMPILER_FILE_LOCKS($1)
- _LT_LINKER_SHLIBS($1)
- _LT_SYS_DYNAMIC_LINKER($1)
- _LT_LINKER_HARDCODE_LIBPATH($1)
-
- _LT_CONFIG($1)
- fi # test -n "$compiler"
-
- CC=$lt_save_CC
- LDCXX=$LD
- LD=$lt_save_LD
- GCC=$lt_save_GCC
- with_gnu_ld=$lt_save_with_gnu_ld
- lt_cv_path_LDCXX=$lt_cv_path_LD
- lt_cv_path_LD=$lt_save_path_LD
- lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
- lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
-fi # test "$_lt_caught_CXX_error" != yes
-
-AC_LANG_POP
-])# _LT_LANG_CXX_CONFIG
-
-
-# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
-# ---------------------------------
-# Figure out "hidden" library dependencies from verbose
-# compiler output when linking a shared library.
-# Parse the compiler output and extract the necessary
-# objects, libraries and library flags.
-m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
-[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-# Dependencies to place before and after the object being linked:
-_LT_TAGVAR(predep_objects, $1)=
-_LT_TAGVAR(postdep_objects, $1)=
-_LT_TAGVAR(predeps, $1)=
-_LT_TAGVAR(postdeps, $1)=
-_LT_TAGVAR(compiler_lib_search_path, $1)=
-
-dnl we can't use the lt_simple_compile_test_code here,
-dnl because it contains code intended for an executable,
-dnl not a library. It's possible we should let each
-dnl tag define a new lt_????_link_test_code variable,
-dnl but it's only used here...
-m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
-int a;
-void foo (void) { a = 0; }
-_LT_EOF
-], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
-class Foo
-{
-public:
- Foo (void) { a = 0; }
-private:
- int a;
-};
-_LT_EOF
-], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
- subroutine foo
- implicit none
- integer*4 a
- a=0
- return
- end
-_LT_EOF
-], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
- subroutine foo
- implicit none
- integer a
- a=0
- return
- end
-_LT_EOF
-], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
-public class foo {
- private int a;
- public void bar (void) {
- a = 0;
- }
-};
-_LT_EOF
-])
-dnl Parse the compiler output and extract the necessary
-dnl objects, libraries and library flags.
-if AC_TRY_EVAL(ac_compile); then
- # Parse the compiler output and extract the necessary
- # objects, libraries and library flags.
-
- # Sentinel used to keep track of whether or not we are before
- # the conftest object file.
- pre_test_object_deps_done=no
-
- for p in `eval "$output_verbose_link_cmd"`; do
- case $p in
-
- -L* | -R* | -l*)
- # Some compilers place space between "-{L,R}" and the path.
- # Remove the space.
- if test $p = "-L" ||
- test $p = "-R"; then
- prev=$p
- continue
- else
- prev=
- fi
-
- if test "$pre_test_object_deps_done" = no; then
- case $p in
- -L* | -R*)
- # Internal compiler library paths should come after those
- # provided by the user. The postdeps already come after the
- # user-supplied libs so there is no need to process them.
- if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
- _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}"
- else
- _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}"
- fi
- ;;
- # The "-l" case would never come before the object being
- # linked, so don't bother handling this case.
- esac
- else
- if test -z "$_LT_TAGVAR(postdeps, $1)"; then
- _LT_TAGVAR(postdeps, $1)="${prev}${p}"
- else
- _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
- fi
- fi
- ;;
-
- *.$objext)
- # This assumes that the test object file only shows up
- # once in the compiler output.
- if test "$p" = "conftest.$objext"; then
- pre_test_object_deps_done=yes
- continue
- fi
-
- if test "$pre_test_object_deps_done" = no; then
- if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
- _LT_TAGVAR(predep_objects, $1)="$p"
- else
- _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
- fi
- else
- if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
- _LT_TAGVAR(postdep_objects, $1)="$p"
- else
- _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
- fi
- fi
- ;;
-
- *) ;; # Ignore the rest.
-
- esac
- done
-
- # Clean up.
- rm -f a.out a.exe
-else
- echo "libtool.m4: error: problem compiling $1 test program"
-fi
-
-$RM -f conftest.$objext
-
-# PORTME: override above test on systems where it is broken
-m4_if([$1], [CXX],
-[case $host_os in
-interix[[3-9]]*)
- # Interix 3.5 installs completely hosed .la files for C++, so rather than
- # hack all around it, let's just trust "g++" to DTRT.
- _LT_TAGVAR(predep_objects,$1)=
- _LT_TAGVAR(postdep_objects,$1)=
- _LT_TAGVAR(postdeps,$1)=
- ;;
-
-linux*)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ C*)
- # Sun C++ 5.9
-
- # The more standards-conforming stlport4 library is
- # incompatible with the Cstd library. Avoid specifying
- # it if it's in CXXFLAGS. Ignore libCrun as
- # -library=stlport4 depends on it.
- case " $CXX $CXXFLAGS " in
- *" -library=stlport4 "*)
- solaris_use_stlport4=yes
- ;;
- esac
-
- if test "$solaris_use_stlport4" != yes; then
- _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
- fi
- ;;
- esac
- ;;
-
-solaris*)
- case $cc_basename in
- CC*)
- # The more standards-conforming stlport4 library is
- # incompatible with the Cstd library. Avoid specifying
- # it if it's in CXXFLAGS. Ignore libCrun as
- # -library=stlport4 depends on it.
- case " $CXX $CXXFLAGS " in
- *" -library=stlport4 "*)
- solaris_use_stlport4=yes
- ;;
- esac
-
- # Adding this requires a known-good setup of shared libraries for
- # Sun compiler versions before 5.6, else PIC objects from an old
- # archive will be linked into the output, leading to subtle bugs.
- if test "$solaris_use_stlport4" != yes; then
- _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
- fi
- ;;
- esac
- ;;
-esac
-])
-
-case " $_LT_TAGVAR(postdeps, $1) " in
-*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
-esac
- _LT_TAGVAR(compiler_lib_search_dirs, $1)=
-if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
- _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
-fi
-_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
- [The directories searched by this compiler when creating a shared library])
-_LT_TAGDECL([], [predep_objects], [1],
- [Dependencies to place before and after the objects being linked to
- create a shared library])
-_LT_TAGDECL([], [postdep_objects], [1])
-_LT_TAGDECL([], [predeps], [1])
-_LT_TAGDECL([], [postdeps], [1])
-_LT_TAGDECL([], [compiler_lib_search_path], [1],
- [The library search path used internally by the compiler when linking
- a shared library])
-])# _LT_SYS_HIDDEN_LIBDEPS
-
-
-# _LT_PROG_F77
-# ------------
-# Since AC_PROG_F77 is broken, in that it returns the empty string
-# if there is no Fortran compiler, we have our own version here.
-m4_defun([_LT_PROG_F77],
-[
-pushdef([AC_MSG_ERROR], [_lt_disable_F77=yes])
-AC_PROG_F77
-if test -z "$F77" || test "X$F77" = "Xno"; then
- _lt_disable_F77=yes
-fi
-popdef([AC_MSG_ERROR])
-])# _LT_PROG_F77
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([_LT_PROG_F77], [])
-
-
-# _LT_LANG_F77_CONFIG([TAG])
-# --------------------------
-# Ensure that the configuration variables for a Fortran 77 compiler are
-# suitably defined. These variables are subsequently used by _LT_CONFIG
-# to write the compiler configuration to `libtool'.
-m4_defun([_LT_LANG_F77_CONFIG],
-[AC_REQUIRE([_LT_PROG_F77])dnl
-AC_LANG_PUSH(Fortran 77)
-
-_LT_TAGVAR(archive_cmds_need_lc, $1)=no
-_LT_TAGVAR(allow_undefined_flag, $1)=
-_LT_TAGVAR(always_export_symbols, $1)=no
-_LT_TAGVAR(archive_expsym_cmds, $1)=
-_LT_TAGVAR(export_dynamic_flag_spec, $1)=
-_LT_TAGVAR(hardcode_direct, $1)=no
-_LT_TAGVAR(hardcode_direct_absolute, $1)=no
-_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
-_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
-_LT_TAGVAR(hardcode_libdir_separator, $1)=
-_LT_TAGVAR(hardcode_minus_L, $1)=no
-_LT_TAGVAR(hardcode_automatic, $1)=no
-_LT_TAGVAR(inherit_rpath, $1)=no
-_LT_TAGVAR(module_cmds, $1)=
-_LT_TAGVAR(module_expsym_cmds, $1)=
-_LT_TAGVAR(link_all_deplibs, $1)=unknown
-_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
-_LT_TAGVAR(no_undefined_flag, $1)=
-_LT_TAGVAR(whole_archive_flag_spec, $1)=
-_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
-
-# Source file extension for f77 test sources.
-ac_ext=f
-
-# Object file extension for compiled f77 test sources.
-objext=o
-_LT_TAGVAR(objext, $1)=$objext
-
-# No sense in running all these tests if we already determined that
-# the F77 compiler isn't working. Some variables (like enable_shared)
-# are currently assumed to apply to all compilers on this platform,
-# and will be corrupted by setting them based on a non-working compiler.
-if test "$_lt_disable_F77" != yes; then
- # Code to be used in simple compile tests
- lt_simple_compile_test_code="\
- subroutine t
- return
- end
-"
-
- # Code to be used in simple link tests
- lt_simple_link_test_code="\
- program t
- end
-"
-
- # ltmain only uses $CC for tagged configurations so make sure $CC is set.
- _LT_TAG_COMPILER
-
- # save warnings/boilerplate of simple test code
- _LT_COMPILER_BOILERPLATE
- _LT_LINKER_BOILERPLATE
-
- # Allow CC to be a program name with arguments.
- lt_save_CC="$CC"
- lt_save_GCC=$GCC
- CC=${F77-"f77"}
- compiler=$CC
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_CC_BASENAME([$compiler])
- GCC=$G77
- if test -n "$compiler"; then
- AC_MSG_CHECKING([if libtool supports shared libraries])
- AC_MSG_RESULT([$can_build_shared])
-
- AC_MSG_CHECKING([whether to build shared libraries])
- test "$can_build_shared" = "no" && enable_shared=no
-
- # On AIX, shared libraries and static libraries use the same namespace, and
- # are all built from PIC.
- case $host_os in
- aix3*)
- test "$enable_shared" = yes && enable_static=no
- if test -n "$RANLIB"; then
- archive_cmds="$archive_cmds~\$RANLIB \$lib"
- postinstall_cmds='$RANLIB $lib'
- fi
- ;;
- aix[[4-9]]*)
- if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
- test "$enable_shared" = yes && enable_static=no
- fi
- ;;
- esac
- AC_MSG_RESULT([$enable_shared])
-
- AC_MSG_CHECKING([whether to build static libraries])
- # Make sure either enable_shared or enable_static is yes.
- test "$enable_shared" = yes || enable_static=yes
- AC_MSG_RESULT([$enable_static])
-
- _LT_TAGVAR(GCC, $1)="$G77"
- _LT_TAGVAR(LD, $1)="$LD"
-
- ## CAVEAT EMPTOR:
- ## There is no encapsulation within the following macros, do not change
- ## the running order or otherwise move them around unless you know exactly
- ## what you are doing...
- _LT_COMPILER_PIC($1)
- _LT_COMPILER_C_O($1)
- _LT_COMPILER_FILE_LOCKS($1)
- _LT_LINKER_SHLIBS($1)
- _LT_SYS_DYNAMIC_LINKER($1)
- _LT_LINKER_HARDCODE_LIBPATH($1)
-
- _LT_CONFIG($1)
- fi # test -n "$compiler"
-
- GCC=$lt_save_GCC
- CC="$lt_save_CC"
-fi # test "$_lt_disable_F77" != yes
-
-AC_LANG_POP
-])# _LT_LANG_F77_CONFIG
-
-
-# _LT_PROG_FC
-# -----------
-# Since AC_PROG_FC is broken, in that it returns the empty string
-# if there is no fortran compiler, we have our own version here.
-m4_defun([_LT_PROG_FC],
-[
-pushdef([AC_MSG_ERROR], [_lt_disable_FC=yes])
-AC_PROG_FC
-if test -z "$FC" || test "X$FC" = "Xno"; then
- _lt_disable_FC=yes
-fi
-popdef([AC_MSG_ERROR])
-])# _LT_PROG_FC
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([_LT_PROG_FC], [])
-
-
-# _LT_LANG_FC_CONFIG([TAG])
-# -------------------------
-# Ensure that the configuration variables for a Fortran compiler are
-# suitably defined. These variables are subsequently used by _LT_CONFIG
-# to write the compiler configuration to `libtool'.
-m4_defun([_LT_LANG_FC_CONFIG],
-[AC_REQUIRE([_LT_PROG_FC])dnl
-AC_LANG_PUSH(Fortran)
-
-_LT_TAGVAR(archive_cmds_need_lc, $1)=no
-_LT_TAGVAR(allow_undefined_flag, $1)=
-_LT_TAGVAR(always_export_symbols, $1)=no
-_LT_TAGVAR(archive_expsym_cmds, $1)=
-_LT_TAGVAR(export_dynamic_flag_spec, $1)=
-_LT_TAGVAR(hardcode_direct, $1)=no
-_LT_TAGVAR(hardcode_direct_absolute, $1)=no
-_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
-_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
-_LT_TAGVAR(hardcode_libdir_separator, $1)=
-_LT_TAGVAR(hardcode_minus_L, $1)=no
-_LT_TAGVAR(hardcode_automatic, $1)=no
-_LT_TAGVAR(inherit_rpath, $1)=no
-_LT_TAGVAR(module_cmds, $1)=
-_LT_TAGVAR(module_expsym_cmds, $1)=
-_LT_TAGVAR(link_all_deplibs, $1)=unknown
-_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
-_LT_TAGVAR(no_undefined_flag, $1)=
-_LT_TAGVAR(whole_archive_flag_spec, $1)=
-_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
-
-# Source file extension for fc test sources.
-ac_ext=${ac_fc_srcext-f}
-
-# Object file extension for compiled fc test sources.
-objext=o
-_LT_TAGVAR(objext, $1)=$objext
-
-# No sense in running all these tests if we already determined that
-# the FC compiler isn't working. Some variables (like enable_shared)
-# are currently assumed to apply to all compilers on this platform,
-# and will be corrupted by setting them based on a non-working compiler.
-if test "$_lt_disable_FC" != yes; then
- # Code to be used in simple compile tests
- lt_simple_compile_test_code="\
- subroutine t
- return
- end
-"
-
- # Code to be used in simple link tests
- lt_simple_link_test_code="\
- program t
- end
-"
-
- # ltmain only uses $CC for tagged configurations so make sure $CC is set.
- _LT_TAG_COMPILER
-
- # save warnings/boilerplate of simple test code
- _LT_COMPILER_BOILERPLATE
- _LT_LINKER_BOILERPLATE
-
- # Allow CC to be a program name with arguments.
- lt_save_CC="$CC"
- lt_save_GCC=$GCC
- CC=${FC-"f95"}
- compiler=$CC
- GCC=$ac_cv_fc_compiler_gnu
-
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_CC_BASENAME([$compiler])
-
- if test -n "$compiler"; then
- AC_MSG_CHECKING([if libtool supports shared libraries])
- AC_MSG_RESULT([$can_build_shared])
-
- AC_MSG_CHECKING([whether to build shared libraries])
- test "$can_build_shared" = "no" && enable_shared=no
-
- # On AIX, shared libraries and static libraries use the same namespace, and
- # are all built from PIC.
- case $host_os in
- aix3*)
- test "$enable_shared" = yes && enable_static=no
- if test -n "$RANLIB"; then
- archive_cmds="$archive_cmds~\$RANLIB \$lib"
- postinstall_cmds='$RANLIB $lib'
- fi
- ;;
- aix[[4-9]]*)
- if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
- test "$enable_shared" = yes && enable_static=no
- fi
- ;;
- esac
- AC_MSG_RESULT([$enable_shared])
-
- AC_MSG_CHECKING([whether to build static libraries])
- # Make sure either enable_shared or enable_static is yes.
- test "$enable_shared" = yes || enable_static=yes
- AC_MSG_RESULT([$enable_static])
-
- _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu"
- _LT_TAGVAR(LD, $1)="$LD"
-
- ## CAVEAT EMPTOR:
- ## There is no encapsulation within the following macros, do not change
- ## the running order or otherwise move them around unless you know exactly
- ## what you are doing...
- _LT_SYS_HIDDEN_LIBDEPS($1)
- _LT_COMPILER_PIC($1)
- _LT_COMPILER_C_O($1)
- _LT_COMPILER_FILE_LOCKS($1)
- _LT_LINKER_SHLIBS($1)
- _LT_SYS_DYNAMIC_LINKER($1)
- _LT_LINKER_HARDCODE_LIBPATH($1)
-
- _LT_CONFIG($1)
- fi # test -n "$compiler"
-
- GCC=$lt_save_GCC
- CC="$lt_save_CC"
-fi # test "$_lt_disable_FC" != yes
-
-AC_LANG_POP
-])# _LT_LANG_FC_CONFIG
-
-
-# _LT_LANG_GCJ_CONFIG([TAG])
-# --------------------------
-# Ensure that the configuration variables for the GNU Java Compiler
-# are suitably defined. These variables are subsequently used by _LT_CONFIG
-# to write the compiler configuration to `libtool'.
-m4_defun([_LT_LANG_GCJ_CONFIG],
-[AC_REQUIRE([LT_PROG_GCJ])dnl
-AC_LANG_SAVE
-
-# Source file extension for Java test sources.
-ac_ext=java
-
-# Object file extension for compiled Java test sources.
-objext=o
-_LT_TAGVAR(objext, $1)=$objext
-
-# Code to be used in simple compile tests
-lt_simple_compile_test_code="class foo {}"
-
-# Code to be used in simple link tests
-lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
-
-# ltmain only uses $CC for tagged configurations so make sure $CC is set.
-_LT_TAG_COMPILER
-
-# save warnings/boilerplate of simple test code
-_LT_COMPILER_BOILERPLATE
-_LT_LINKER_BOILERPLATE
-
-# Allow CC to be a program name with arguments.
-lt_save_CC="$CC"
-lt_save_GCC=$GCC
-GCC=yes
-CC=${GCJ-"gcj"}
-compiler=$CC
-_LT_TAGVAR(compiler, $1)=$CC
-_LT_TAGVAR(LD, $1)="$LD"
-_LT_CC_BASENAME([$compiler])
-
-# GCJ did not yet exist back when GCC did not implicitly link in libc.
-_LT_TAGVAR(archive_cmds_need_lc, $1)=no
-
-_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
-
-## CAVEAT EMPTOR:
-## There is no encapsulation within the following macros, do not change
-## the running order or otherwise move them around unless you know exactly
-## what you are doing...
-if test -n "$compiler"; then
- _LT_COMPILER_NO_RTTI($1)
- _LT_COMPILER_PIC($1)
- _LT_COMPILER_C_O($1)
- _LT_COMPILER_FILE_LOCKS($1)
- _LT_LINKER_SHLIBS($1)
- _LT_LINKER_HARDCODE_LIBPATH($1)
-
- _LT_CONFIG($1)
-fi
-
-AC_LANG_RESTORE
-
-GCC=$lt_save_GCC
-CC="$lt_save_CC"
-])# _LT_LANG_GCJ_CONFIG
-
-
-# _LT_LANG_RC_CONFIG([TAG])
-# -------------------------
-# Ensure that the configuration variables for the Windows resource compiler
-# are suitably defined. These variables are subsequently used by _LT_CONFIG
-# to write the compiler configuration to `libtool'.
-m4_defun([_LT_LANG_RC_CONFIG],
-[AC_REQUIRE([LT_PROG_RC])dnl
-AC_LANG_SAVE
-
-# Source file extension for RC test sources.
-ac_ext=rc
-
-# Object file extension for compiled RC test sources.
-objext=o
-_LT_TAGVAR(objext, $1)=$objext
-
-# Code to be used in simple compile tests
-lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
-
-# Code to be used in simple link tests
-lt_simple_link_test_code="$lt_simple_compile_test_code"
-
-# ltmain only uses $CC for tagged configurations so make sure $CC is set.
-_LT_TAG_COMPILER
-
-# save warnings/boilerplate of simple test code
-_LT_COMPILER_BOILERPLATE
-_LT_LINKER_BOILERPLATE
-
-# Allow CC to be a program name with arguments.
-lt_save_CC="$CC"
-lt_save_GCC=$GCC
-GCC=
-CC=${RC-"windres"}
-compiler=$CC
-_LT_TAGVAR(compiler, $1)=$CC
-_LT_CC_BASENAME([$compiler])
-_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
-
-if test -n "$compiler"; then
- :
- _LT_CONFIG($1)
-fi
-
-GCC=$lt_save_GCC
-AC_LANG_RESTORE
-CC="$lt_save_CC"
-])# _LT_LANG_RC_CONFIG
-
-
-# LT_PROG_GCJ
-# -----------
-AC_DEFUN([LT_PROG_GCJ],
-[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
- [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
- [AC_CHECK_TOOL(GCJ, gcj,)
- test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2"
- AC_SUBST(GCJFLAGS)])])[]dnl
-])
-
-# Old name:
-AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
-
-
-# LT_PROG_RC
-# ----------
-AC_DEFUN([LT_PROG_RC],
-[AC_CHECK_TOOL(RC, windres,)
-])
-
-# Old name:
-AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([LT_AC_PROG_RC], [])
-
-
-# _LT_DECL_EGREP
-# --------------
-# If we don't have a new enough Autoconf to choose the best grep
-# available, choose the first one in the user's PATH.
-m4_defun([_LT_DECL_EGREP],
-[AC_REQUIRE([AC_PROG_EGREP])dnl
-AC_REQUIRE([AC_PROG_FGREP])dnl
-test -z "$GREP" && GREP=grep
-_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
-_LT_DECL([], [EGREP], [1], [An ERE matcher])
-_LT_DECL([], [FGREP], [1], [A literal string matcher])
-dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
-AC_SUBST([GREP])
-])
-
-
-# _LT_DECL_OBJDUMP
-# ----------------
-# If we don't have a new enough Autoconf to choose the best objdump
-# available, choose the first one in the user's PATH.
-m4_defun([_LT_DECL_OBJDUMP],
-[AC_CHECK_TOOL(OBJDUMP, objdump, false)
-test -z "$OBJDUMP" && OBJDUMP=objdump
-_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
-AC_SUBST([OBJDUMP])
-])
-
-
-# _LT_DECL_SED
-# ------------
-# Check for a fully-functional sed program, that truncates
-# as few characters as possible. Prefer GNU sed if found.
-m4_defun([_LT_DECL_SED],
-[AC_PROG_SED
-test -z "$SED" && SED=sed
-Xsed="$SED -e 1s/^X//"
-_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
-_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
- [Sed that helps us avoid accidentally triggering echo(1) options like -n])
-])# _LT_DECL_SED
-
-m4_ifndef([AC_PROG_SED], [
-############################################################
-# NOTE: This macro has been submitted for inclusion into #
-# GNU Autoconf as AC_PROG_SED. When it is available in #
-# a released version of Autoconf we should remove this #
-# macro and use it instead. #
-############################################################
-
-m4_defun([AC_PROG_SED],
-[AC_MSG_CHECKING([for a sed that does not truncate output])
-AC_CACHE_VAL(lt_cv_path_SED,
-[# Loop through the user's path and test for sed and gsed.
-# Then use that list of sed's as ones to test for truncation.
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- test -z "$as_dir" && as_dir=.
- for lt_ac_prog in sed gsed; do
- for ac_exec_ext in '' $ac_executable_extensions; do
- if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
- lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
- fi
- done
- done
-done
-IFS=$as_save_IFS
-lt_ac_max=0
-lt_ac_count=0
-# Add /usr/xpg4/bin/sed as it is typically found on Solaris
-# along with /bin/sed that truncates output.
-for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
- test ! -f $lt_ac_sed && continue
- cat /dev/null > conftest.in
- lt_ac_count=0
- echo $ECHO_N "0123456789$ECHO_C" >conftest.in
- # Check for GNU sed and select it if it is found.
- if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
- lt_cv_path_SED=$lt_ac_sed
- break
- fi
- while true; do
- cat conftest.in conftest.in >conftest.tmp
- mv conftest.tmp conftest.in
- cp conftest.in conftest.nl
- echo >>conftest.nl
- $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
- cmp -s conftest.out conftest.nl || break
- # 10000 chars as input seems more than enough
- test $lt_ac_count -gt 10 && break
- lt_ac_count=`expr $lt_ac_count + 1`
- if test $lt_ac_count -gt $lt_ac_max; then
- lt_ac_max=$lt_ac_count
- lt_cv_path_SED=$lt_ac_sed
- fi
- done
-done
-])
-SED=$lt_cv_path_SED
-AC_SUBST([SED])
-AC_MSG_RESULT([$SED])
-])#AC_PROG_SED
-])#m4_ifndef
-
-# Old name:
-AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([LT_AC_PROG_SED], [])
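
For readers skimming the deleted macro above: the truncation probe it performs can be reproduced on its own. The following is a minimal standalone sketch of the same doubling-and-compare idea; the candidate list and the probe.* temp-file names are illustrative choices, not taken from the macro.

#!/bin/sh
# Sketch: pick a sed that survives long input lines, as the macro above does.
# Keep doubling a digit string; a no-op edit (s/a$//) must reproduce it exactly.
SED=sed; best=0
for cand in sed gsed /usr/xpg4/bin/sed; do      # illustrative candidate list
  command -v "$cand" >/dev/null 2>&1 || continue
  printf '0123456789' > probe.in
  n=0
  while :; do
    cat probe.in probe.in > probe.tmp && mv probe.tmp probe.in
    cp probe.in probe.nl && echo >> probe.nl
    "$cand" -e 's/a$//' < probe.nl > probe.out || break
    cmp -s probe.out probe.nl || break          # output was truncated: give up on this sed
    test $n -gt 10 && break                     # ~10k characters is more than enough
    n=$((n + 1))
  done
  if test $n -gt $best; then best=$n; SED=$cand; fi
done
rm -f probe.in probe.nl probe.out probe.tmp
echo "selected SED=$SED (survived $best doublings)"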
-
-
-# _LT_CHECK_SHELL_FEATURES
-# ------------------------
-# Find out whether the shell is Bourne or XSI compatible,
-# or has some other useful features.
-m4_defun([_LT_CHECK_SHELL_FEATURES],
-[AC_MSG_CHECKING([whether the shell understands some XSI constructs])
-# Try some XSI features
-xsi_shell=no
-( _lt_dummy="a/b/c"
- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
- = c,a/b,, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-AC_MSG_RESULT([$xsi_shell])
-_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell'])
-
-AC_MSG_CHECKING([whether the shell understands "+="])
-lt_shell_append=no
-( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \
- >/dev/null 2>&1 \
- && lt_shell_append=yes
-AC_MSG_RESULT([$lt_shell_append])
-_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append'])
-
-if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
- lt_unset=unset
-else
- lt_unset=false
-fi
-_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
-
-# test EBCDIC or ASCII
-case `echo X|tr X '\101'` in
- A) # ASCII based system
- # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
- lt_SP2NL='tr \040 \012'
- lt_NL2SP='tr \015\012 \040\040'
- ;;
- *) # EBCDIC based system
- lt_SP2NL='tr \100 \n'
- lt_NL2SP='tr \r\n \100\100'
- ;;
-esac
-_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
-_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
-])# _LT_CHECK_SHELL_FEATURES
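
For readers unfamiliar with these two probes, they can be run directly in an ordinary shell; here is a minimal standalone sketch of both checks (variable names are illustrative, and the second probe is written as a plain foo+=baz instead of going through positional parameters as the macro does).

#!/bin/sh
# Sketch of the two capability probes performed above.
# 1. XSI constructs: ${var##pattern}, ${var%pattern}, $(( )) arithmetic, ${#var}.
xsi_shell=no
( _d="a/b/c"
  test "${_d##*/},${_d%/*}," = "c,a/b," \
    && eval 'test $(( 1 + 1 )) -eq 2 && test "${#_d}" -eq 5' ) >/dev/null 2>&1 \
  && xsi_shell=yes
echo "XSI constructs: $xsi_shell"

# 2. The "+=" append operator (a bash/ksh extension, not plain Bourne).
lt_shell_append=no
( foo=bar; eval 'foo+=baz' && test "$foo" = barbaz ) >/dev/null 2>&1 \
  && lt_shell_append=yes
echo "+= append:      $lt_shell_append"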
-
-
-# _LT_PROG_XSI_SHELLFNS
-# ---------------------
-# Bourne and XSI compatible variants of some useful shell functions.
-m4_defun([_LT_PROG_XSI_SHELLFNS],
-[case $xsi_shell in
- yes)
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_dirname file append nondir_replacement
-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
-# otherwise set result to NONDIR_REPLACEMENT.
-func_dirname ()
-{
- case ${1} in
- */*) func_dirname_result="${1%/*}${2}" ;;
- * ) func_dirname_result="${3}" ;;
- esac
-}
-
-# func_basename file
-func_basename ()
-{
- func_basename_result="${1##*/}"
-}
-
-# func_dirname_and_basename file append nondir_replacement
-# perform func_basename and func_dirname in a single function
-# call:
-# dirname: Compute the dirname of FILE. If nonempty,
-# add APPEND to the result, otherwise set result
-# to NONDIR_REPLACEMENT.
-# value returned in "$func_dirname_result"
-# basename: Compute filename of FILE.
-# value returned in "$func_basename_result"
-# Implementation must be kept synchronized with func_dirname
-# and func_basename. For efficiency, we do not delegate to
-# those functions but instead duplicate the functionality here.
-func_dirname_and_basename ()
-{
- case ${1} in
- */*) func_dirname_result="${1%/*}${2}" ;;
- * ) func_dirname_result="${3}" ;;
- esac
- func_basename_result="${1##*/}"
-}
-
-# func_stripname prefix suffix name
-# strip PREFIX and SUFFIX off of NAME.
-# PREFIX and SUFFIX must not contain globbing or regex special
-# characters, hashes, percent signs, but SUFFIX may contain a leading
-# dot (in which case that matches only a dot).
-func_stripname ()
-{
- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
- # positional parameters, so assign one to ordinary parameter first.
- func_stripname_result=${3}
- func_stripname_result=${func_stripname_result#"${1}"}
- func_stripname_result=${func_stripname_result%"${2}"}
-}
-
-# func_opt_split
-func_opt_split ()
-{
- func_opt_split_opt=${1%%=*}
- func_opt_split_arg=${1#*=}
-}
-
-# func_lo2o object
-func_lo2o ()
-{
- case ${1} in
- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
- *) func_lo2o_result=${1} ;;
- esac
-}
-
-# func_xform libobj-or-source
-func_xform ()
-{
- func_xform_result=${1%.*}.lo
-}
-
-# func_arith arithmetic-term...
-func_arith ()
-{
- func_arith_result=$(( $[*] ))
-}
-
-# func_len string
-# STRING may not start with a hyphen.
-func_len ()
-{
- func_len_result=${#1}
-}
-
-_LT_EOF
- ;;
- *) # Bourne compatible functions.
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_dirname file append nondir_replacement
-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
-# otherwise set result to NONDIR_REPLACEMENT.
-func_dirname ()
-{
- # Extract subdirectory from the argument.
- func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"`
- if test "X$func_dirname_result" = "X${1}"; then
- func_dirname_result="${3}"
- else
- func_dirname_result="$func_dirname_result${2}"
- fi
-}
-
-# func_basename file
-func_basename ()
-{
- func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"`
-}
-
-dnl func_dirname_and_basename
-dnl A portable version of this function is already defined in general.m4sh
-dnl so there is no need for it here.
-
-# func_stripname prefix suffix name
-# strip PREFIX and SUFFIX off of NAME.
-# PREFIX and SUFFIX must not contain globbing or regex special
-# characters, hashes, percent signs, but SUFFIX may contain a leading
-# dot (in which case that matches only a dot).
-# func_strip_suffix prefix name
-func_stripname ()
-{
- case ${2} in
- .*) func_stripname_result=`$ECHO "X${3}" \
- | $Xsed -e "s%^${1}%%" -e "s%\\\\${2}\$%%"`;;
- *) func_stripname_result=`$ECHO "X${3}" \
- | $Xsed -e "s%^${1}%%" -e "s%${2}\$%%"`;;
- esac
-}
-
-# sed scripts:
-my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q'
-my_sed_long_arg='1s/^-[[^=]]*=//'
-
-# func_opt_split
-func_opt_split ()
-{
- func_opt_split_opt=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_opt"`
- func_opt_split_arg=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_arg"`
-}
-
-# func_lo2o object
-func_lo2o ()
-{
- func_lo2o_result=`$ECHO "X${1}" | $Xsed -e "$lo2o"`
-}
-
-# func_xform libobj-or-source
-func_xform ()
-{
- func_xform_result=`$ECHO "X${1}" | $Xsed -e 's/\.[[^.]]*$/.lo/'`
-}
-
-# func_arith arithmetic-term...
-func_arith ()
-{
- func_arith_result=`expr "$[@]"`
-}
-
-# func_len string
-# STRING may not start with a hyphen.
-func_len ()
-{
- func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len`
-}
-
-_LT_EOF
-esac
-
-case $lt_shell_append in
- yes)
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_append var value
-# Append VALUE to the end of shell variable VAR.
-func_append ()
-{
- eval "$[1]+=\$[2]"
-}
-_LT_EOF
- ;;
- *)
- cat << \_LT_EOF >> "$cfgfile"
-
-# func_append var value
-# Append VALUE to the end of shell variable VAR.
-func_append ()
-{
- eval "$[1]=\$$[1]\$[2]"
-}
-
-_LT_EOF
- ;;
- esac
-])
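
To make the two branches above easier to compare, here is a small self-contained sketch that implements the dirname/basename helpers both ways and checks that they agree. It assumes a modern POSIX shell (so the XSI forms are available), and the inline sed expressions are spelled out for illustration rather than taken from libtool's own $dirname/$basename variables.

#!/bin/sh
# Sketch: the same helpers written with XSI parameter expansion and with sed.
xsi_dirname ()  { case $1 in */*) echo "${1%/*}" ;; *) echo "." ;; esac; }
xsi_basename () { echo "${1##*/}"; }

sed_dirname () {
  _r=$(echo "X$1" | sed -e 's|^X||' -e 's|/[^/]*$||')
  test "X$_r" = "X$1" && _r=.        # no slash at all: fall back to "."
  echo "$_r"
}
sed_basename () { echo "X$1" | sed -e 's|^X||' -e 's|^.*/||'; }

for p in /usr/lib/libfoo.la libbar.la a/b/c; do
  d1=$(xsi_dirname "$p");  d2=$(sed_dirname "$p")
  b1=$(xsi_basename "$p"); b2=$(sed_basename "$p")
  test "$d1" = "$d2" || echo "dirname mismatch on $p: $d1 vs $d2"
  test "$b1" = "$b2" || echo "basename mismatch on $p: $b1 vs $b2"
done
echo "XSI and sed variants checked"

The XSI forms avoid forking sed for every call, which is why libtool emits them whenever the configuring shell supports them.
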
diff --git a/scripts/training/compact-rule-table/m4/ltoptions.m4 b/scripts/training/compact-rule-table/m4/ltoptions.m4
deleted file mode 100644
index 34151a3ba..000000000
--- a/scripts/training/compact-rule-table/m4/ltoptions.m4
+++ /dev/null
@@ -1,368 +0,0 @@
-# Helper functions for option handling. -*- Autoconf -*-
-#
-# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
-# Written by Gary V. Vaughan, 2004
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-
-# serial 6 ltoptions.m4
-
-# This is to help aclocal find these macros, as it can't see m4_define.
-AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
-
-
-# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
-# ------------------------------------------
-m4_define([_LT_MANGLE_OPTION],
-[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
-
-
-# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
-# ---------------------------------------
-# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
-# matching handler defined, dispatch to it. Other OPTION-NAMEs are
-# saved as a flag.
-m4_define([_LT_SET_OPTION],
-[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
-m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
- _LT_MANGLE_DEFUN([$1], [$2]),
- [m4_warning([Unknown $1 option `$2'])])[]dnl
-])
-
-
-# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
-# ------------------------------------------------------------
-# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
-m4_define([_LT_IF_OPTION],
-[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
-
-
-# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
-# -------------------------------------------------------
-# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
-# are set.
-m4_define([_LT_UNLESS_OPTIONS],
-[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
- [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
- [m4_define([$0_found])])])[]dnl
-m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
-])[]dnl
-])
-
-
-# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
-# ----------------------------------------
-# OPTION-LIST is a space-separated list of Libtool options associated
-# with MACRO-NAME. If any OPTION has a matching handler declared with
-# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
-# the unknown option and exit.
-m4_defun([_LT_SET_OPTIONS],
-[# Set options
-m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
- [_LT_SET_OPTION([$1], _LT_Option)])
-
-m4_if([$1],[LT_INIT],[
- dnl
- dnl Simply set some default values (i.e off) if boolean options were not
- dnl specified:
- _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
- ])
- _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
- ])
- dnl
- dnl If no reference was made to various pairs of opposing options, then
- dnl we run the default mode handler for the pair. For example, if neither
- dnl `shared' nor `disable-shared' was passed, we enable building of shared
- dnl archives by default:
- _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
- _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
- _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
- _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
- [_LT_ENABLE_FAST_INSTALL])
- ])
-])# _LT_SET_OPTIONS
-
-
-## --------------------------------- ##
-## Macros to handle LT_INIT options. ##
-## --------------------------------- ##
-
-# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
-# -----------------------------------------
-m4_define([_LT_MANGLE_DEFUN],
-[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
-
-
-# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
-# -----------------------------------------------
-m4_define([LT_OPTION_DEFINE],
-[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
-])# LT_OPTION_DEFINE
-
-
-# dlopen
-# ------
-LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
-])
-
-AU_DEFUN([AC_LIBTOOL_DLOPEN],
-[_LT_SET_OPTION([LT_INIT], [dlopen])
-AC_DIAGNOSE([obsolete],
-[$0: Remove this warning and the call to _LT_SET_OPTION when you
-put the `dlopen' option into LT_INIT's first parameter.])
-])
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
-
-
-# win32-dll
-# ---------
-# Declare package support for building win32 dll's.
-LT_OPTION_DEFINE([LT_INIT], [win32-dll],
-[enable_win32_dll=yes
-
-case $host in
-*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-cegcc*)
- AC_CHECK_TOOL(AS, as, false)
- AC_CHECK_TOOL(DLLTOOL, dlltool, false)
- AC_CHECK_TOOL(OBJDUMP, objdump, false)
- ;;
-esac
-
-test -z "$AS" && AS=as
-_LT_DECL([], [AS], [0], [Assembler program])dnl
-
-test -z "$DLLTOOL" && DLLTOOL=dlltool
-_LT_DECL([], [DLLTOOL], [0], [DLL creation program])dnl
-
-test -z "$OBJDUMP" && OBJDUMP=objdump
-_LT_DECL([], [OBJDUMP], [0], [Object dumper program])dnl
-])# win32-dll
-
-AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
-[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-_LT_SET_OPTION([LT_INIT], [win32-dll])
-AC_DIAGNOSE([obsolete],
-[$0: Remove this warning and the call to _LT_SET_OPTION when you
-put the `win32-dll' option into LT_INIT's first parameter.])
-])
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
-
-
-# _LT_ENABLE_SHARED([DEFAULT])
-# ----------------------------
-# implement the --enable-shared flag, and support the `shared' and
-# `disable-shared' LT_INIT options.
-# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
-m4_define([_LT_ENABLE_SHARED],
-[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
-AC_ARG_ENABLE([shared],
- [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
- [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
- [p=${PACKAGE-default}
- case $enableval in
- yes) enable_shared=yes ;;
- no) enable_shared=no ;;
- *)
- enable_shared=no
- # Look at the argument we got. We use all the common list separators.
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
- for pkg in $enableval; do
- IFS="$lt_save_ifs"
- if test "X$pkg" = "X$p"; then
- enable_shared=yes
- fi
- done
- IFS="$lt_save_ifs"
- ;;
- esac],
- [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
-
- _LT_DECL([build_libtool_libs], [enable_shared], [0],
- [Whether or not to build shared libraries])
-])# _LT_ENABLE_SHARED
-
-LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
-LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
-
-# Old names:
-AC_DEFUN([AC_ENABLE_SHARED],
-[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
-])
-
-AC_DEFUN([AC_DISABLE_SHARED],
-[_LT_SET_OPTION([LT_INIT], [disable-shared])
-])
-
-AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
-AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AM_ENABLE_SHARED], [])
-dnl AC_DEFUN([AM_DISABLE_SHARED], [])
-
-
-
-# _LT_ENABLE_STATIC([DEFAULT])
-# ----------------------------
-# implement the --enable-static flag, and support the `static' and
-# `disable-static' LT_INIT options.
-# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
-m4_define([_LT_ENABLE_STATIC],
-[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
-AC_ARG_ENABLE([static],
- [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
- [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
- [p=${PACKAGE-default}
- case $enableval in
- yes) enable_static=yes ;;
- no) enable_static=no ;;
- *)
- enable_static=no
- # Look at the argument we got. We use all the common list separators.
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
- for pkg in $enableval; do
- IFS="$lt_save_ifs"
- if test "X$pkg" = "X$p"; then
- enable_static=yes
- fi
- done
- IFS="$lt_save_ifs"
- ;;
- esac],
- [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
-
- _LT_DECL([build_old_libs], [enable_static], [0],
- [Whether or not to build static libraries])
-])# _LT_ENABLE_STATIC
-
-LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
-LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
-
-# Old names:
-AC_DEFUN([AC_ENABLE_STATIC],
-[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
-])
-
-AC_DEFUN([AC_DISABLE_STATIC],
-[_LT_SET_OPTION([LT_INIT], [disable-static])
-])
-
-AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
-AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AM_ENABLE_STATIC], [])
-dnl AC_DEFUN([AM_DISABLE_STATIC], [])
-
-
-
-# _LT_ENABLE_FAST_INSTALL([DEFAULT])
-# ----------------------------------
-# implement the --enable-fast-install flag, and support the `fast-install'
-# and `disable-fast-install' LT_INIT options.
-# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
-m4_define([_LT_ENABLE_FAST_INSTALL],
-[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
-AC_ARG_ENABLE([fast-install],
- [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
- [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
- [p=${PACKAGE-default}
- case $enableval in
- yes) enable_fast_install=yes ;;
- no) enable_fast_install=no ;;
- *)
- enable_fast_install=no
- # Look at the argument we got. We use all the common list separators.
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
- for pkg in $enableval; do
- IFS="$lt_save_ifs"
- if test "X$pkg" = "X$p"; then
- enable_fast_install=yes
- fi
- done
- IFS="$lt_save_ifs"
- ;;
- esac],
- [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
-
-_LT_DECL([fast_install], [enable_fast_install], [0],
- [Whether or not to optimize for fast installation])dnl
-])# _LT_ENABLE_FAST_INSTALL
-
-LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
-LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
-
-# Old names:
-AU_DEFUN([AC_ENABLE_FAST_INSTALL],
-[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
-AC_DIAGNOSE([obsolete],
-[$0: Remove this warning and the call to _LT_SET_OPTION when you put
-the `fast-install' option into LT_INIT's first parameter.])
-])
-
-AU_DEFUN([AC_DISABLE_FAST_INSTALL],
-[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
-AC_DIAGNOSE([obsolete],
-[$0: Remove this warning and the call to _LT_SET_OPTION when you put
-the `disable-fast-install' option into LT_INIT's first parameter.])
-])
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
-dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
-
-
-# _LT_WITH_PIC([MODE])
-# --------------------
-# implement the --with-pic flag, and support the `pic-only' and `no-pic'
-# LT_INIT options.
-# MODE is either `yes' or `no'. If omitted, it defaults to `both'.
-m4_define([_LT_WITH_PIC],
-[AC_ARG_WITH([pic],
- [AS_HELP_STRING([--with-pic],
- [try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
- [pic_mode="$withval"],
- [pic_mode=default])
-
-test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
-
-_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
-])# _LT_WITH_PIC
-
-LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
-LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
-
-# Old name:
-AU_DEFUN([AC_LIBTOOL_PICMODE],
-[_LT_SET_OPTION([LT_INIT], [pic-only])
-AC_DIAGNOSE([obsolete],
-[$0: Remove this warning and the call to _LT_SET_OPTION when you
-put the `pic-only' option into LT_INIT's first parameter.])
-])
-
-dnl aclocal-1.4 backwards compatibility:
-dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
-
-## ----------------- ##
-## LTDL_INIT Options ##
-## ----------------- ##
-
-m4_define([_LTDL_MODE], [])
-LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
- [m4_define([_LTDL_MODE], [nonrecursive])])
-LT_OPTION_DEFINE([LTDL_INIT], [recursive],
- [m4_define([_LTDL_MODE], [recursive])])
-LT_OPTION_DEFINE([LTDL_INIT], [subproject],
- [m4_define([_LTDL_MODE], [subproject])])
-
-m4_define([_LTDL_TYPE], [])
-LT_OPTION_DEFINE([LTDL_INIT], [installable],
- [m4_define([_LTDL_TYPE], [installable])])
-LT_OPTION_DEFINE([LTDL_INIT], [convenience],
- [m4_define([_LTDL_TYPE], [convenience])])
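
The three --enable-* handlers above share the same PKGS parsing idiom; the sketch below reproduces it standalone. The package name and the example argument are made up for illustration, and ':' stands in for $PATH_SEPARATOR.

#!/bin/sh
# Sketch of the --enable-shared=PKGS handling used above: split the value on
# the usual list separators and enable the feature only for an exact match
# against this package's name.
PACKAGE=mosesdecoder                     # illustrative
enableval="kenlm,mosesdecoder irstlm"    # e.g. --enable-shared="kenlm,mosesdecoder irstlm"

enable_shared=no
case $enableval in
  yes) enable_shared=yes ;;
  no)  enable_shared=no ;;
  *)
    save_ifs=$IFS; IFS="${IFS},:"
    for pkg in $enableval; do
      IFS=$save_ifs                      # restore before running the loop body
      test "X$pkg" = "X$PACKAGE" && enable_shared=yes
    done
    IFS=$save_ifs
    ;;
esac
echo "enable_shared=$enable_shared"
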
diff --git a/scripts/training/compact-rule-table/m4/ltsugar.m4 b/scripts/training/compact-rule-table/m4/ltsugar.m4
deleted file mode 100644
index 9000a057d..000000000
--- a/scripts/training/compact-rule-table/m4/ltsugar.m4
+++ /dev/null
@@ -1,123 +0,0 @@
-# ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*-
-#
-# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
-# Written by Gary V. Vaughan, 2004
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-
-# serial 6 ltsugar.m4
-
-# This is to help aclocal find these macros, as it can't see m4_define.
-AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
-
-
-# lt_join(SEP, ARG1, [ARG2...])
-# -----------------------------
-# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
-# associated separator.
-# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
-# versions in m4sugar had bugs.
-m4_define([lt_join],
-[m4_if([$#], [1], [],
- [$#], [2], [[$2]],
- [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
-m4_define([_lt_join],
-[m4_if([$#$2], [2], [],
- [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
-
-
-# lt_car(LIST)
-# lt_cdr(LIST)
-# ------------
-# Manipulate m4 lists.
-# These macros are necessary as long as we still need to support
-# Autoconf-2.59 which quotes differently.
-m4_define([lt_car], [[$1]])
-m4_define([lt_cdr],
-[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
- [$#], 1, [],
- [m4_dquote(m4_shift($@))])])
-m4_define([lt_unquote], $1)
-
-
-# lt_append(MACRO-NAME, STRING, [SEPARATOR])
-# ------------------------------------------
-# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
-# Note that neither SEPARATOR nor STRING are expanded; they are appended
-# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
-# No SEPARATOR is output if MACRO-NAME was previously undefined (different
-# than defined and empty).
-#
-# This macro is needed until we can rely on Autoconf 2.62, since earlier
-# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
-m4_define([lt_append],
-[m4_define([$1],
- m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
-
-
-
-# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
-# ----------------------------------------------------------
-# Produce a SEP delimited list of all paired combinations of elements of
-# PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list
-# has the form PREFIXmINFIXSUFFIXn.
-# Needed until we can rely on m4_combine added in Autoconf 2.62.
-m4_define([lt_combine],
-[m4_if(m4_eval([$# > 3]), [1],
- [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
-[[m4_foreach([_Lt_prefix], [$2],
- [m4_foreach([_Lt_suffix],
- ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
- [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
-
-
-# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
-# -----------------------------------------------------------------------
-# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
-# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
-m4_define([lt_if_append_uniq],
-[m4_ifdef([$1],
- [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
- [lt_append([$1], [$2], [$3])$4],
- [$5])],
- [lt_append([$1], [$2], [$3])$4])])
-
-
-# lt_dict_add(DICT, KEY, VALUE)
-# -----------------------------
-m4_define([lt_dict_add],
-[m4_define([$1($2)], [$3])])
-
-
-# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
-# --------------------------------------------
-m4_define([lt_dict_add_subkey],
-[m4_define([$1($2:$3)], [$4])])
-
-
-# lt_dict_fetch(DICT, KEY, [SUBKEY])
-# ----------------------------------
-m4_define([lt_dict_fetch],
-[m4_ifval([$3],
- m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
- m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
-
-
-# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
-# -----------------------------------------------------------------
-m4_define([lt_if_dict_fetch],
-[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
- [$5],
- [$6])])
-
-
-# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
-# --------------------------------------------------------------
-m4_define([lt_dict_filter],
-[m4_if([$5], [], [],
- [lt_join(m4_quote(m4_default([$4], [[, ]])),
- lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
- [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
-])
diff --git a/scripts/training/compact-rule-table/m4/ltversion.m4 b/scripts/training/compact-rule-table/m4/ltversion.m4
deleted file mode 100644
index f3c530980..000000000
--- a/scripts/training/compact-rule-table/m4/ltversion.m4
+++ /dev/null
@@ -1,23 +0,0 @@
-# ltversion.m4 -- version numbers -*- Autoconf -*-
-#
-# Copyright (C) 2004 Free Software Foundation, Inc.
-# Written by Scott James Remnant, 2004
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-
-# Generated from ltversion.in.
-
-# serial 3017 ltversion.m4
-# This file is part of GNU Libtool
-
-m4_define([LT_PACKAGE_VERSION], [2.2.6b])
-m4_define([LT_PACKAGE_REVISION], [1.3017])
-
-AC_DEFUN([LTVERSION_VERSION],
-[macro_version='2.2.6b'
-macro_revision='1.3017'
-_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
-_LT_DECL(, macro_revision, 0)
-])
diff --git a/scripts/training/compact-rule-table/m4/lt~obsolete.m4 b/scripts/training/compact-rule-table/m4/lt~obsolete.m4
deleted file mode 100644
index 637bb2066..000000000
--- a/scripts/training/compact-rule-table/m4/lt~obsolete.m4
+++ /dev/null
@@ -1,92 +0,0 @@
-# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*-
-#
-# Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.
-# Written by Scott James Remnant, 2004.
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-
-# serial 4 lt~obsolete.m4
-
-# These exist entirely to fool aclocal when bootstrapping libtool.
-#
-# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
-# which have later been changed to m4_define as they aren't part of the
-# exported API, or moved to Autoconf or Automake where they belong.
-#
-# The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN
-# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
-# using a macro with the same name in our local m4/libtool.m4 it'll
-# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
-# and doesn't know about Autoconf macros at all.)
-#
-# So we provide this file, which has a silly filename so it's always
-# included after everything else. This provides aclocal with the
-# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
-# because those macros already exist, or will be overwritten later.
-# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
-#
-# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
-# Yes, that means every name once taken will need to remain here until
-# we give up compatibility with versions before 1.7, at which point
-# we need to keep only those names which we still refer to.
-
-# This is to help aclocal find these macros, as it can't see m4_define.
-AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
-
-m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
-m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])])
-m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
-m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])])
-m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
-m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])])
-m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])])
-m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
-m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])])
-m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])])
-m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])])
-m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
-m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
-m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
-m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
-m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
-m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])])
-m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
-m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
-m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])])
-m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])])
-m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
-m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
-m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
-m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
-m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
-m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
-m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
-m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])])
-m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])])
-m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])])
-m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
-m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
-m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])])
-m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])])
-m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
-m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
-m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])])
-m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
-m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])])
-m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])])
-m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])])
-m4_ifndef([AC_LIBTOOL_RC], [AC_DEFUN([AC_LIBTOOL_RC])])
-m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
-m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
-m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
-m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
-m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
-m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
-m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
-m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])])
-m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
diff --git a/scripts/training/compact-rule-table/missing b/scripts/training/compact-rule-table/missing
deleted file mode 100755
index 28055d2ae..000000000
--- a/scripts/training/compact-rule-table/missing
+++ /dev/null
@@ -1,376 +0,0 @@
-#! /bin/sh
-# Common stub for a few missing GNU programs while installing.
-
-scriptversion=2009-04-28.21; # UTC
-
-# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006,
-# 2008, 2009 Free Software Foundation, Inc.
-# Originally by François Pinard <pinard@iro.umontreal.ca>, 1996.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2, or (at your option)
-# any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that program.
-
-if test $# -eq 0; then
- echo 1>&2 "Try \`$0 --help' for more information"
- exit 1
-fi
-
-run=:
-sed_output='s/.* --output[ =]\([^ ]*\).*/\1/p'
-sed_minuso='s/.* -o \([^ ]*\).*/\1/p'
-
-# In the cases where this matters, `missing' is being run in the
-# srcdir already.
-if test -f configure.ac; then
- configure_ac=configure.ac
-else
- configure_ac=configure.in
-fi
-
-msg="missing on your system"
-
-case $1 in
---run)
- # Try to run requested program, and just exit if it succeeds.
- run=
- shift
- "$@" && exit 0
- # Exit code 63 means version mismatch. This often happens
- # when the user tries to use an ancient version of a tool on
- # a file that requires a minimum version. In this case
- # we should proceed as if the program had been absent, or
- # if --run hadn't been passed.
- if test $? = 63; then
- run=:
- msg="probably too old"
- fi
- ;;
-
- -h|--h|--he|--hel|--help)
- echo "\
-$0 [OPTION]... PROGRAM [ARGUMENT]...
-
-Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an
-error status if there is no known handling for PROGRAM.
-
-Options:
- -h, --help display this help and exit
- -v, --version output version information and exit
- --run try to run the given command, and emulate it if it fails
-
-Supported PROGRAM values:
- aclocal touch file \`aclocal.m4'
- autoconf touch file \`configure'
- autoheader touch file \`config.h.in'
- autom4te touch the output file, or create a stub one
- automake touch all \`Makefile.in' files
- bison create \`y.tab.[ch]', if possible, from existing .[ch]
- flex create \`lex.yy.c', if possible, from existing .c
- help2man touch the output file
- lex create \`lex.yy.c', if possible, from existing .c
- makeinfo touch the output file
- tar try tar, gnutar, gtar, then tar without non-portable flags
- yacc create \`y.tab.[ch]', if possible, from existing .[ch]
-
-Version suffixes to PROGRAM as well as the prefixes \`gnu-', \`gnu', and
-\`g' are ignored when checking the name.
-
-Send bug reports to <bug-automake@gnu.org>."
- exit $?
- ;;
-
- -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
- echo "missing $scriptversion (GNU Automake)"
- exit $?
- ;;
-
- -*)
- echo 1>&2 "$0: Unknown \`$1' option"
- echo 1>&2 "Try \`$0 --help' for more information"
- exit 1
- ;;
-
-esac
-
-# normalize program name to check for.
-program=`echo "$1" | sed '
- s/^gnu-//; t
- s/^gnu//; t
- s/^g//; t'`
-
-# Now exit if we have it, but it failed. Also exit now if we
-# don't have it and --version was passed (most likely to detect
-# the program). This is about non-GNU programs, so use $1 not
-# $program.
-case $1 in
- lex*|yacc*)
- # Not GNU programs, they don't have --version.
- ;;
-
- tar*)
- if test -n "$run"; then
- echo 1>&2 "ERROR: \`tar' requires --run"
- exit 1
- elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
- exit 1
- fi
- ;;
-
- *)
- if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
- # We have it, but it failed.
- exit 1
- elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
- # Could not run --version or --help. This is probably someone
- # running `$TOOL --version' or `$TOOL --help' to check whether
- # $TOOL exists and not knowing $TOOL uses missing.
- exit 1
- fi
- ;;
-esac
-
-# If it does not exist, or fails to run (possibly an outdated version),
-# try to emulate it.
-case $program in
- aclocal*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified \`acinclude.m4' or \`${configure_ac}'. You might want
- to install the \`Automake' and \`Perl' packages. Grab them from
- any GNU archive site."
- touch aclocal.m4
- ;;
-
- autoconf*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified \`${configure_ac}'. You might want to install the
- \`Autoconf' and \`GNU m4' packages. Grab them from any GNU
- archive site."
- touch configure
- ;;
-
- autoheader*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified \`acconfig.h' or \`${configure_ac}'. You might want
- to install the \`Autoconf' and \`GNU m4' packages. Grab them
- from any GNU archive site."
- files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}`
- test -z "$files" && files="config.h"
- touch_files=
- for f in $files; do
- case $f in
- *:*) touch_files="$touch_files "`echo "$f" |
- sed -e 's/^[^:]*://' -e 's/:.*//'`;;
- *) touch_files="$touch_files $f.in";;
- esac
- done
- touch $touch_files
- ;;
-
- automake*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'.
- You might want to install the \`Automake' and \`Perl' packages.
- Grab them from any GNU archive site."
- find . -type f -name Makefile.am -print |
- sed 's/\.am$/.in/' |
- while read f; do touch "$f"; done
- ;;
-
- autom4te*)
- echo 1>&2 "\
-WARNING: \`$1' is needed, but is $msg.
- You might have modified some files without having the
- proper tools for further handling them.
- You can get \`$1' as part of \`Autoconf' from any GNU
- archive site."
-
- file=`echo "$*" | sed -n "$sed_output"`
- test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
- if test -f "$file"; then
- touch $file
- else
- test -z "$file" || exec >$file
- echo "#! /bin/sh"
- echo "# Created by GNU Automake missing as a replacement of"
- echo "# $ $@"
- echo "exit 0"
- chmod +x $file
- exit 1
- fi
- ;;
-
- bison*|yacc*)
- echo 1>&2 "\
-WARNING: \`$1' $msg. You should only need it if
- you modified a \`.y' file. You may need the \`Bison' package
- in order for those modifications to take effect. You can get
- \`Bison' from any GNU archive site."
- rm -f y.tab.c y.tab.h
- if test $# -ne 1; then
- eval LASTARG="\${$#}"
- case $LASTARG in
- *.y)
- SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'`
- if test -f "$SRCFILE"; then
- cp "$SRCFILE" y.tab.c
- fi
- SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'`
- if test -f "$SRCFILE"; then
- cp "$SRCFILE" y.tab.h
- fi
- ;;
- esac
- fi
- if test ! -f y.tab.h; then
- echo >y.tab.h
- fi
- if test ! -f y.tab.c; then
- echo 'main() { return 0; }' >y.tab.c
- fi
- ;;
-
- lex*|flex*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified a \`.l' file. You may need the \`Flex' package
- in order for those modifications to take effect. You can get
- \`Flex' from any GNU archive site."
- rm -f lex.yy.c
- if test $# -ne 1; then
- eval LASTARG="\${$#}"
- case $LASTARG in
- *.l)
- SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'`
- if test -f "$SRCFILE"; then
- cp "$SRCFILE" lex.yy.c
- fi
- ;;
- esac
- fi
- if test ! -f lex.yy.c; then
- echo 'main() { return 0; }' >lex.yy.c
- fi
- ;;
-
- help2man*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified a dependency of a manual page. You may need the
- \`Help2man' package in order for those modifications to take
- effect. You can get \`Help2man' from any GNU archive site."
-
- file=`echo "$*" | sed -n "$sed_output"`
- test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
- if test -f "$file"; then
- touch $file
- else
- test -z "$file" || exec >$file
- echo ".ab help2man is required to generate this page"
- exit $?
- fi
- ;;
-
- makeinfo*)
- echo 1>&2 "\
-WARNING: \`$1' is $msg. You should only need it if
- you modified a \`.texi' or \`.texinfo' file, or any other file
- indirectly affecting the aspect of the manual. The spurious
- call might also be the consequence of using a buggy \`make' (AIX,
- DU, IRIX). You might want to install the \`Texinfo' package or
- the \`GNU make' package. Grab either from any GNU archive site."
- # The file to touch is that specified with -o ...
- file=`echo "$*" | sed -n "$sed_output"`
- test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
- if test -z "$file"; then
- # ... or it is the one specified with @setfilename ...
- infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'`
- file=`sed -n '
- /^@setfilename/{
- s/.* \([^ ]*\) *$/\1/
- p
- q
- }' $infile`
- # ... or it is derived from the source name (dir/f.texi becomes f.info)
- test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info
- fi
- # If the file does not exist, the user really needs makeinfo;
- # let's fail without touching anything.
- test -f $file || exit 1
- touch $file
- ;;
-
- tar*)
- shift
-
- # We have already tried tar in the generic part.
- # Look for gnutar/gtar before invocation to avoid ugly error
- # messages.
- if (gnutar --version > /dev/null 2>&1); then
- gnutar "$@" && exit 0
- fi
- if (gtar --version > /dev/null 2>&1); then
- gtar "$@" && exit 0
- fi
- firstarg="$1"
- if shift; then
- case $firstarg in
- *o*)
- firstarg=`echo "$firstarg" | sed s/o//`
- tar "$firstarg" "$@" && exit 0
- ;;
- esac
- case $firstarg in
- *h*)
- firstarg=`echo "$firstarg" | sed s/h//`
- tar "$firstarg" "$@" && exit 0
- ;;
- esac
- fi
-
- echo 1>&2 "\
-WARNING: I can't seem to be able to run \`tar' with the given arguments.
- You may want to install GNU tar or Free paxutils, or check the
- command line arguments."
- exit 1
- ;;
-
- *)
- echo 1>&2 "\
-WARNING: \`$1' is needed, and is $msg.
- You might have modified some files without having the
- proper tools for further handling them. Check the \`README' file,
- it often tells you about the needed prerequisites for installing
- this package. You may also peek at any GNU archive site, in case
- some other package would contain this missing \`$1' program."
- exit 1
- ;;
-esac
-
-exit 0
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "scriptversion="
-# time-stamp-format: "%:y-%02m-%02d.%02H"
-# time-stamp-time-zone: "UTC"
-# time-stamp-end: "; # UTC"
-# End:
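
For orientation, the dispatch in the script above boils down to two steps: normalise the requested tool name, then fake the tool's usual output with touch so that make can proceed. Here is a heavily compressed sketch of that flow; the tool cases and warning texts are abridged, not the script's exact wording.

#!/bin/sh
# Compressed sketch of the `missing' stub deleted above.
test $# -gt 0 || { echo "usage: $0 PROGRAM [ARGS...]" >&2; exit 1; }
tool=$1; shift

# 1. Strip gnu-/gnu/g prefixes so gtar, gnutar and tar are treated alike.
program=$(echo "$tool" | sed '
  s/^gnu-//; t
  s/^gnu//; t
  s/^g//; t')

# 2. If the real tool answers --version, just run it.
if "$tool" --version >/dev/null 2>&1; then
  exec "$tool" "$@"
fi

# 3. Otherwise warn and touch the files the tool would have produced, so
#    stale timestamps stop triggering rules that cannot be rebuilt here.
case $program in
  aclocal*)  echo "WARNING: $tool is missing; touching aclocal.m4" >&2
             touch aclocal.m4 ;;
  autoconf*) echo "WARNING: $tool is missing; touching configure" >&2
             touch configure ;;
  automake*) echo "WARNING: $tool is missing; touching Makefile.in files" >&2
             find . -type f -name Makefile.am -print | sed 's/\.am$/.in/' |
               while read f; do touch "$f"; done ;;
  *)         echo "WARNING: $tool is missing and cannot be emulated here" >&2
             exit 1 ;;
esac
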
diff --git a/scripts/training/compact-rule-table/tools/Makefile.am b/scripts/training/compact-rule-table/tools/Makefile.am
deleted file mode 100644
index fa476a2c7..000000000
--- a/scripts/training/compact-rule-table/tools/Makefile.am
+++ /dev/null
@@ -1,15 +0,0 @@
-AM_CPPFLAGS = $(BOOST_CPPFLAGS)
-AM_LDFLAGS = $(BOOST_PROGRAM_OPTIONS_LDFLAGS)
-LDADD = $(BOOST_PROGRAM_OPTIONS_LIBS)
-
-bin_PROGRAMS = compactify
-
-compactify_SOURCES = Compactify.cpp \
- Compactify.h \
- Compactify_Main.cpp \
- Exception.h \
- NumberedSet.h \
- Options.h \
- RuleTableParser.cpp \
- RuleTableParser.h \
- Tool.h
diff --git a/scripts/training/lexical-reordering/Jamfile b/scripts/training/lexical-reordering/Jamfile
new file mode 100644
index 000000000..322f4202a
--- /dev/null
+++ b/scripts/training/lexical-reordering/Jamfile
@@ -0,0 +1,3 @@
+exe score : reordering_classes.cpp score.cpp ../../..//z ;
+
+install dist : score : <location>. <install-type>EXE ;
diff --git a/scripts/training/lexical-reordering/Makefile b/scripts/training/lexical-reordering/Makefile
deleted file mode 100644
index f614043f4..000000000
--- a/scripts/training/lexical-reordering/Makefile
+++ /dev/null
@@ -1,15 +0,0 @@
-
-all: score
-
-clean:
- rm -f *.o
-
-.cpp.o:
- $(CXX) -O6 -g -c $<
-
-score: score.cpp reordering_classes.o
- $(CXX) -lz score.cpp reordering_classes.o -o score
-
-#reordering_classes.o: reordering_classes.h reordering_classes.cpp
-# $(CXX) reordering_classes.cpp
-
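Since the hand-written rules above are superseded by the Jamfile added earlier, the build they encoded amounts to a single compile-and-link command; a rough equivalent is sketched below (the optimisation flags are a placeholder choice, and zlib is listed after the sources so --as-needed style linkers still pick it up).

#!/bin/sh
# Sketch: the one-shot build the deleted Makefile performed.
${CXX:-g++} -O2 -g -o score score.cpp reordering_classes.cpp -lz
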
diff --git a/scripts/training/mbr/Makefile b/scripts/training/mbr/Makefile
deleted file mode 100755
index adb0b0e3b..000000000
--- a/scripts/training/mbr/Makefile
+++ /dev/null
@@ -1,14 +0,0 @@
-CXXFLAGS=-O3
-LDFLAGS=
-LDLIBS=
-
-all: mbr
-
-clean:
- rm -f *.o
-
-mert: $(OBJS)
- $(G++) $(OBJS) $(LDLIBS) -o $@
-
-mert_p: $(OBJS)
- $(G++) $(LDFLAGS) $(OBJS) $(LDLIBS) -o $@
diff --git a/scripts/training/mbr/mbr.cpp b/scripts/training/mbr/mbr.cpp
deleted file mode 100644
index 8004620cc..000000000
--- a/scripts/training/mbr/mbr.cpp
+++ /dev/null
@@ -1,398 +0,0 @@
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-#include <vector>
-#include <map>
-#include <stdlib.h>
-#include <math.h>
-#include <algorithm>
-#include <stdio.h>
-#include <unistd.h>
-#include <cstring>
-#include <time.h>
-
-using namespace std ;
-
-
-/* Input :
- 1. a sorted n-best list, with duplicates filtered out in the following format
- 0 ||| amr moussa is currently on a visit to libya , tomorrow , sunday , to hold talks with regard to the in sudan . ||| 0 -4.94418 0 0 -2.16036 0 0 -81.4462 -106.593 -114.43 -105.55 -12.7873 -26.9057 -25.3715 -52.9336 7.99917 -24 ||| -4.58432
-
- 2. a weight vector
- 3. bleu order ( default = 4)
- 4. scaling factor to weigh the weight vector (default = 1.0)
-
- Output :
- translations that minimise the Bayes Risk of the n-best list
-
-
-*/
-
-int TABLE_LINE_MAX_LENGTH = 5000;
-vector<double> weights;
-float SCALE = 1.0;
-int BLEU_ORDER = 4;
-int SMOOTH = 1;
-int DEBUG = 0;
-double min_interval = 1e-4;
-
-#define SAFE_GETLINE(_IS, _LINE, _SIZE, _DELIM) {_IS.getline(_LINE, _SIZE, _DELIM); if(_IS.fail() && !_IS.bad() && !_IS.eof()) _IS.clear();}
-
-typedef string WORD;
-typedef unsigned int WORD_ID;
-
-
-map<WORD, WORD_ID> lookup;
-vector< WORD > vocab;
-
-class candidate_t
-{
-public:
- vector<WORD_ID> translation;
- vector<double> features;
- int translation_size;
-} ;
-
-
-void usage(void)
-{
- fprintf(stderr,
- "usage: mbr -s SCALE -n BLEU_ORDER -w weights.txt -i nbest.txt");
-}
-
-
-char *strstrsep(char **stringp, const char *delim)
-{
- char *match, *save;
- save = *stringp;
- if (*stringp == NULL)
- return NULL;
- match = strstr(*stringp, delim);
- if (match == NULL) {
- *stringp = NULL;
- return save;
- }
- *match = '\0';
- *stringp = match + strlen(delim);
- return save;
-}
-
-
-
-vector<string> tokenize( const char input[] )
-{
- vector< string > token;
- bool betweenWords = true;
- int start;
- int i=0;
- for(; input[i] != '\0'; i++) {
- bool isSpace = (input[i] == ' ' || input[i] == '\t');
-
- if (!isSpace && betweenWords) {
- start = i;
- betweenWords = false;
- } else if (isSpace && !betweenWords) {
- token.push_back( string( input+start, i-start ) );
- betweenWords = true;
- }
- }
- if (!betweenWords)
- token.push_back( string( input+start, i-start+1 ) );
- return token;
-}
-
-
-
-WORD_ID storeIfNew( WORD word )
-{
- if( lookup.find( word ) != lookup.end() )
- return lookup[ word ];
-
- WORD_ID id = vocab.size();
- vocab.push_back( word );
- lookup[ word ] = id;
- return id;
-}
-
-int count( string input, char delim )
-{
- int count = 0;
- for ( int i = 0; i < input.size(); i++) {
- if ( input[i] == delim)
- count++;
- }
- return count;
-}
-
-double calculate_probability(const vector<double> & feats, const vector<double> & weights,double SCALE)
-{
-
- if (feats.size() != weights.size())
- cerr << "ERROR : Number of features <> number of weights " << endl;
-
- double prob = 0;
- for ( int i = 0; i < feats.size(); i++) {
- prob += feats[i]*weights[i]*SCALE;
- }
- return exp(prob);
-}
-
-void extract_ngrams(const vector<WORD_ID>& sentence, map < vector < WORD_ID>, int > & allngrams)
-{
- vector< WORD_ID> ngram;
- for (int k = 0; k< BLEU_ORDER; k++) {
- for(int i =0; i < max((int)sentence.size()-k,0); i++) {
- for ( int j = i; j<= i+k; j++) {
- ngram.push_back(sentence[j]);
- }
- ++allngrams[ngram];
- ngram.clear();
- }
- }
-}
-
-
-double calculate_score(const vector<candidate_t*> & sents, int ref, int hyp, vector < map < vector < WORD_ID>, int > > & ngram_stats )
-{
- int comps_n = 2*BLEU_ORDER+1;
- int comps[comps_n];
- double logbleu = 0.0, brevity;
-
- int hyp_length = sents[hyp]->translation_size;
-
- for (int i =0; i<BLEU_ORDER; i++) {
- comps[2*i] = 0;
- comps[2*i+1] = max(hyp_length-i,0);
- }
-
- map< vector < WORD_ID > ,int > & hyp_ngrams = ngram_stats[hyp] ;
- map< vector < WORD_ID >, int > & ref_ngrams = ngram_stats[ref] ;
-
- for (map< vector< WORD_ID >, int >::iterator it = hyp_ngrams.begin();
- it != hyp_ngrams.end(); it++) {
- map< vector< WORD_ID >, int >::iterator ref_it = ref_ngrams.find(it->first);
- if(ref_it != ref_ngrams.end()) {
- comps[2* (it->first.size()-1)] += min(ref_it->second,it->second);
- }
- }
- comps[comps_n-1] = sents[ref]->translation_size;
-
- if (DEBUG) {
- for ( int i = 0; i < comps_n; i++)
- cerr << "Comp " << i << " : " << comps[i];
- }
-
- for (int i=0; i<BLEU_ORDER; i++) {
- if (comps[0] == 0)
- return 0.0;
- if ( i > 0 )
- logbleu += log(static_cast<double>(comps[2*i]+SMOOTH))-log(static_cast<double>(comps[2*i+1]+SMOOTH));
- else
- logbleu += log(static_cast<double>(comps[2*i]))-log(static_cast<double>(comps[2*i+1]));
- }
- logbleu /= BLEU_ORDER;
- brevity = 1.0-(double)comps[comps_n-1]/comps[1]; // comps[comps_n-1] is the ref length, comps[1] is the test length
- if (brevity < 0.0)
- logbleu += brevity;
- return exp(logbleu);
-}
-
-vector<double> read_weights(string fileName)
-{
- ifstream inFile;
- inFile.open(fileName.c_str());
- istream *inFileP = &inFile;
-
- char line[TABLE_LINE_MAX_LENGTH];
- int i=0;
- vector<double> weights;
-
- while(true) {
- i++;
- SAFE_GETLINE((*inFileP), line, TABLE_LINE_MAX_LENGTH, '\n');
- if (inFileP->eof()) break;
- vector<string> token = tokenize(line);
-
- for (int j = 0; j < token.size(); j++) {
- weights.push_back(atof(token[j].c_str()));
- }
- }
- cerr << endl;
- return weights;
-}
-
-int find_pos_of_min_element(const vector<double>& vec)
-{
-
- int min_pos = -1;
- double min_element = 10000;
- for ( int i = 0; i < vec.size(); i++) {
- if (vec[i] < min_element) {
- min_element = vec[i];
- min_pos = i;
- }
- }
- /* cerr << "Min pos is : " << min_pos << endl;
- cerr << "Min mbr loss is : " << min_element << endl;*/
- return min_pos;
-}
-
-void process(int sent, const vector<candidate_t*> & sents)
-{
-// cerr << "Sentence " << sent << " has " << sents.size() << " candidate translations" << endl;
- double marginal = 0;
-
- vector<double> joint_prob_vec;
- double joint_prob;
- vector< map < vector <WORD_ID>, int > > ngram_stats;
-
- for (int i = 0; i < sents.size(); i++) {
-// cerr << "Sents " << i << " has trans : " << sents[i]->translation << endl;
- //Calculate marginal and cache the posteriors
- joint_prob = calculate_probability(sents[i]->features,weights,SCALE);
- marginal += joint_prob;
- joint_prob_vec.push_back(joint_prob);
- //Cache ngram counts
- map < vector <WORD_ID>, int > counts;
- extract_ngrams(sents[i]->translation,counts);
- ngram_stats.push_back(counts);
- }
- //cerr << "Marginal is " << marginal;
-
- vector<double> mbr_loss;
- double bleu, weightedLoss;
- double weightedLossCumul = 0;
- double minMBRLoss = 1000000;
- int minMBRLossIdx = -1;
-
- /* Main MBR computation done here */
- for (int i = 0; i < sents.size(); i++) {
- weightedLossCumul = 0;
- for (int j = 0; j < sents.size(); j++) {
- if ( i != j) {
- bleu = calculate_score(sents, j, i,ngram_stats );
- weightedLoss = ( 1 - bleu) * ( joint_prob_vec[j]/marginal);
- weightedLossCumul += weightedLoss;
- if (weightedLossCumul > minMBRLoss)
- break;
- }
- }
- if (weightedLossCumul < minMBRLoss) {
- minMBRLoss = weightedLossCumul;
- minMBRLossIdx = i;
- }
- }
-// cerr << "Min pos is : " << minMBRLossIdx << endl;
-// cerr << "Min mbr loss is : " << minMBRLoss << endl;
- /* Find sentence that minimises Bayes Risk under 1- BLEU loss */
- vector< WORD_ID > best_translation = sents[minMBRLossIdx]->translation;
- for (int i = 0; i < best_translation.size(); i++)
- cout << vocab[best_translation[i]] << " " ;
- cout << endl;
-}
-
-
-void read_nbest_data(string fileName)
-{
-
- FILE * fp;
- fp = fopen (fileName.c_str() , "r");
-
- static char buf[10000];
- char *rest, *tok;
- int field;
- int sent_i, cur_sent;
- candidate_t *cand = NULL;
- vector<candidate_t*> testsents;
-
- cur_sent = -1;
-
- while (fgets(buf, sizeof(buf), fp) != NULL) {
- field = 0;
- rest = buf;
- while ((tok = strstrsep(&rest, "|||")) != NULL) {
- if (field == 0) {
- sent_i = strtol(tok, NULL, 10);
- cand = new candidate_t;
- } else if (field == 2) {
- vector<double> features;
- char * subtok;
- subtok = strtok (tok," ");
-
- while (subtok != NULL) {
- features.push_back(atof(subtok));
- subtok = strtok (NULL, " ");
- }
- cand->features = features;
- } else if (field == 1) {
- vector<string> trans_str = tokenize(tok);
- vector<WORD_ID> trans_int;
- for (int j=0; j<trans_str.size(); j++) {
- trans_int.push_back( storeIfNew( trans_str[j] ) );
- }
- cand->translation= trans_int;
- cand->translation_size = cand->translation.size();
- } else if (field == 3) {
- continue;
- } else {
- fprintf(stderr, "too many fields in n-best list line\n");
- }
- field++;
- }
- if (sent_i != cur_sent) {
- if (cur_sent != - 1) {
- process(cur_sent,testsents);
- }
- cur_sent = sent_i;
- testsents.clear();
- }
- testsents.push_back(cand);
- }
- process(cur_sent,testsents);
- cerr << endl;
-}
-
-int main(int argc, char **argv)
-{
-
- time_t starttime = time(NULL);
- int c;
-
- string f_weight = "";
- string f_nbest = "";
-
- while ((c = getopt(argc, argv, "s:w:n:i:")) != -1) {
- switch (c) {
- case 's':
- SCALE = atof(optarg);
- break;
- case 'n':
- BLEU_ORDER = atoi(optarg);
- break;
- case 'w':
- f_weight = optarg;
- break;
- case 'i':
- f_nbest = optarg;
- break;
- default:
- usage();
- }
- }
-
- argc -= optind;
- argv += optind;
-
- if (argc < 2) {
- usage();
- }
-
-
- weights = read_weights(f_weight);
- read_nbest_data(f_nbest);
-
- time_t endtime = time(NULL);
- cerr << "Processed data in" << (endtime-starttime) << " seconds\n";
-}
-
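The deleted mbr.cpp selects, for each sentence, the n-best candidate that minimises the expected 1-BLEU loss under the model posterior. The fragment below is a minimal standalone sketch of that selection loop, assuming the candidate posteriors are already normalised and that similarity(j, i) returns a BLEU-like score in [0, 1]; mbrSelect and Similarity are illustrative names, not part of the Moses sources.

// Sketch of the MBR selection loop from the removed mbr.cpp (see process()).
#include <cstddef>
#include <limits>
#include <vector>

template <class Similarity>
std::size_t mbrSelect(const std::vector<double> &posterior, Similarity similarity)
{
  std::size_t best = 0;
  double minLoss = std::numeric_limits<double>::max();
  for (std::size_t i = 0; i < posterior.size(); ++i) {
    double loss = 0.0;
    for (std::size_t j = 0; j < posterior.size(); ++j) {
      if (i == j) continue;
      loss += (1.0 - similarity(j, i)) * posterior[j];  // expected 1-BLEU loss
      if (loss > minLoss) break;                        // same early exit as process()
    }
    if (loss < minLoss) {
      minLoss = loss;
      best = i;
    }
  }
  return best;
}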
diff --git a/scripts/training/mert-moses.pl b/scripts/training/mert-moses.pl
index ef5890385..cf899dc4c 100755
--- a/scripts/training/mert-moses.pl
+++ b/scripts/training/mert-moses.pl
@@ -47,9 +47,13 @@
# 13 Oct 2004 Use alternative decoders (DWC)
# Original version by Philipp Koehn
+use strict;
use FindBin qw($Bin);
use File::Basename;
use File::Path;
+use File::Spec;
+use Cwd;
+
my $SCRIPTS_ROOTDIR = $Bin;
$SCRIPTS_ROOTDIR =~ s/\/training$//;
$SCRIPTS_ROOTDIR = $ENV{"SCRIPTS_ROOTDIR"} if defined($ENV{"SCRIPTS_ROOTDIR"});
@@ -82,7 +86,10 @@ my $minimum_required_change_in_weights = 0.00001;
my $verbose = 0;
my $usage = 0; # request for --help
-my $___WORKING_DIR = "mert-work";
+
+# If the working directory is not specified,
+# the default is set to `pwd`/mert-work.
+my $___WORKING_DIR = File::Spec->catfile(Cwd::getcwd(), "mert-work");
my $___DEV_F = undef; # required, input text to decode
my $___DEV_E = undef; # required, basename of files with references
my $___DECODER = undef; # required, pathname to the decoder executable
@@ -144,10 +151,9 @@ my $prev_aggregate_nbl_size = -1; # number of previous step to consider when loa
# -1 means all previous, i.e. from iteration 1
# 0 means no previous data, i.e. from actual iteration
# 1 means 1 previous data , i.e. from the actual iteration and from the previous one
- # and so on
+ # and so on
my $maximum_iterations = 25;
-use strict;
use Getopt::Long;
GetOptions(
"working-dir=s" => \$___WORKING_DIR,
@@ -1298,19 +1304,16 @@ sub submit_or_exec {
sub create_extractor_script()
{
my ($cmd, $outdir) = @_;
+ my $script_path = File::Spec->catfile($outdir, "extractor.sh");
- my $script_path = $outdir."/extractor.sh";
-
- open(OUT,"> $script_path")
- or die "Can't write $script_path";
- print OUT "#!/bin/bash\n";
- print OUT "cd $outdir\n";
- print OUT $cmd."\n";
- close(OUT);
+ open my $out, '>', $script_path
+ or die "Couldn't open $script_path for writing: $!\n";
+ print $out "#!/bin/bash\n";
+ print $out "cd $outdir\n";
+ print $out "$cmd\n";
+ close($out);
`chmod +x $script_path`;
- return $script_path;
+ return $script_path;
}
-
-
diff --git a/scripts/training/phrase-extract/Jamfile b/scripts/training/phrase-extract/Jamfile
new file mode 100644
index 000000000..369d8cc00
--- /dev/null
+++ b/scripts/training/phrase-extract/Jamfile
@@ -0,0 +1,26 @@
+alias InputFileStream : InputFileStream.cpp ../../..//z ;
+alias trees : SyntaxTree.cpp XmlTree.cpp : : : <include>. ;
+
+exe extract : tables-core.cpp SentenceAlignment.cpp extract.cpp InputFileStream ;
+
+exe extract-rules : tables-core.cpp SentenceAlignment.cpp SentenceAlignmentWithSyntax.cpp SyntaxTree.cpp XmlTree.cpp HoleCollection.cpp extract-rules.cpp ExtractedRule.cpp InputFileStream ;
+
+exe extract-lex : extract-lex.cpp InputFileStream ;
+
+exe score : tables-core.cpp AlignmentPhrase.cpp score.cpp PhraseAlignment.cpp InputFileStream ;
+
+exe consolidate : consolidate.cpp tables-core.cpp InputFileStream ;
+
+exe consolidate-direct : consolidate-direct.cpp InputFileStream ;
+
+exe consolidate-reverse : consolidate-reverse.cpp tables-core.cpp InputFileStream ;
+
+exe relax-parse : tables-core.cpp SyntaxTree.cpp XmlTree.cpp relax-parse.cpp ;
+
+exe statistics : tables-core.cpp AlignmentPhrase.cpp statistics.cpp InputFileStream ;
+
+alias programs : extract extract-rules extract-lex score consolidate consolidate-direct consolidate-reverse relax-parse statistics ;
+
+install legacy : programs : <location>. <install-type>EXE ;
+
+build-project extract-ghkm ;
diff --git a/scripts/training/phrase-extract/Makefile b/scripts/training/phrase-extract/Makefile
deleted file mode 100644
index e47623dd2..000000000
--- a/scripts/training/phrase-extract/Makefile
+++ /dev/null
@@ -1,37 +0,0 @@
-all: consolidate consolidate-direct consolidate-reverse extract extract-rules relax-parse \
- score statistics extract-lex
-
-clean:
- rm -f *.o
-
-.cpp.o:
- $(CXX) -O6 -g -c $<
-
-extract: tables-core.o SentenceAlignment.o extract.o InputFileStream.o
- $(CXX) $^ -lz -o extract
-
-extract-rules: tables-core.o SentenceAlignment.o SentenceAlignmentWithSyntax.o SyntaxTree.o XmlTree.o HoleCollection.o extract-rules.o ExtractedRule.o InputFileStream.o
- $(CXX) $^ -lz -o extract-rules
-
-extract-lex: extract-lex.o InputFileStream.o
- $(CXX) $^ -lz -o extract-lex
-
-score: tables-core.o AlignmentPhrase.o score.o PhraseAlignment.o InputFileStream.o
- $(CXX) $^ -lz -o score
-
-consolidate: consolidate.o tables-core.o InputFileStream.o
- $(CXX) $^ -lz -o consolidate
-
-consolidate-direct: consolidate-direct.o InputFileStream.o
- $(CXX) $^ -lz -o consolidate-direct
-
-consolidate-reverse: consolidate-reverse.o tables-core.o InputFileStream.o
- $(CXX) $^ -lz -o consolidate-reverse
-
-relax-parse: tables-core.o SyntaxTree.o XmlTree.o relax-parse.o
- $(CXX) $^ -o relax-parse
-
-statistics: tables-core.o AlignmentPhrase.o statistics.o InputFileStream.o
- $(CXX) $^ -lz -o statistics
-
-
diff --git a/scripts/training/phrase-extract/XmlTree.h b/scripts/training/phrase-extract/XmlTree.h
index 439828d32..7e6bbecea 100644
--- a/scripts/training/phrase-extract/XmlTree.h
+++ b/scripts/training/phrase-extract/XmlTree.h
@@ -33,3 +33,4 @@ std::string TrimXml(const std::string& str);
bool isXmlTag(const std::string& tag);
std::vector<std::string> TokenizeXml(const std::string& str);
bool ProcessAndStripXMLTags(std::string &line, SyntaxTree &tree, std::set< std::string > &labelCollection, std::map< std::string, int > &topLabelCollection );
+std::string unescape(const std::string &str);
diff --git a/scripts/training/phrase-extract/extract-ghkm/Alignment.cpp b/scripts/training/phrase-extract/extract-ghkm/Alignment.cpp
index eb67fe8fd..fcd5e14e1 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Alignment.cpp
+++ b/scripts/training/phrase-extract/extract-ghkm/Alignment.cpp
@@ -1,21 +1,21 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#include "Alignment.h"
@@ -24,8 +24,10 @@
#include <cassert>
#include <cstdlib>
-Alignment
-readAlignment(const std::string & s)
+namespace Moses {
+namespace GHKM {
+
+Alignment ReadAlignment(const std::string &s)
{
Alignment a;
@@ -67,3 +69,6 @@ readAlignment(const std::string & s)
return a;
}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/Alignment.h b/scripts/training/phrase-extract/extract-ghkm/Alignment.h
index c1ca9e586..bc42191e1 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Alignment.h
+++ b/scripts/training/phrase-extract/extract-ghkm/Alignment.h
@@ -1,33 +1,38 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef ALIGNMENT_H_INCLUDED_
-#define ALIGNMENT_H_INCLUDED_
+#ifndef EXTRACT_GHKM_ALIGNMENT_H_
+#define EXTRACT_GHKM_ALIGNMENT_H_
#include <string>
#include <utility>
#include <vector>
+namespace Moses {
+namespace GHKM {
+
typedef std::vector<std::pair<int, int> > Alignment;
-Alignment
-readAlignment(const std::string &);
+Alignment ReadAlignment(const std::string &);
+
+} // namespace GHKM
+} // namespace Moses
#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.cpp b/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.cpp
index 35c89b5c6..c1d132648 100644
--- a/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.cpp
+++ b/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.cpp
@@ -1,350 +1,382 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#include "AlignmentGraph.h"
-#include "Rule.h"
+#include "ComposedRule.h"
+#include "Node.h"
+#include "Options.h"
+#include "ParseTree.h"
#include "Subgraph.h"
#include <algorithm>
#include <cassert>
#include <memory>
+#include <stack>
-namespace
-{
-Node *
-copyParseTree(const ParseTree * root, std::vector<Node *> & nodes)
+namespace Moses {
+namespace GHKM {
+
+AlignmentGraph::AlignmentGraph(const ParseTree *t,
+ const std::vector<std::string> &s,
+ const Alignment &a)
{
- NodeType nodeType = (root->isLeaf()) ? TARGET : TREE;
+ // Copy the parse tree nodes and add them to m_targetNodes.
+ m_root = CopyParseTree(t);
- std::auto_ptr<Node> n(new Node(root->getLabel(), nodeType));
+ // Create a node for each source word.
+ m_sourceNodes.reserve(s.size());
+ for (std::vector<std::string>::const_iterator p(s.begin());
+ p != s.end(); ++p) {
+ m_sourceNodes.push_back(new Node(*p, SOURCE));
+ }
- const std::vector<ParseTree *> & children = root->getChildren();
- std::vector<Node *> childNodes;
- childNodes.reserve(children.size());
- for (std::vector<ParseTree *>::const_iterator p(children.begin());
- p != children.end(); ++p) {
- Node * child = copyParseTree(*p, nodes);
- child->addParent(n.get());
- childNodes.push_back(child);
+ // Connect source nodes to parse tree leaves according to the given word
+ // alignment.
+ std::vector<Node *> targetTreeLeaves;
+ GetTargetTreeLeaves(m_root, targetTreeLeaves);
+ for (Alignment::const_iterator p(a.begin()); p != a.end(); ++p) {
+ Node *src = m_sourceNodes[p->first];
+ Node *tgt = targetTreeLeaves[p->second];
+ src->AddParent(tgt);
+ tgt->AddChild(src);
}
- n->setChildren(childNodes);
- Node * n2 = n.release();
- nodes.push_back(n2);
- return n2;
+ // Attach unaligned source words (if any).
+ AttachUnalignedSourceWords();
+
+ // Populate node spans.
+ std::vector<Node *>::const_iterator p(m_sourceNodes.begin());
+ for (int i = 0; p != m_sourceNodes.end(); ++p, ++i) {
+ (*p)->PropagateIndex(i);
+ }
+
+ // Calculate complement spans.
+ CalcComplementSpans(m_root);
}
-void
-computeFrontierSet(Node * root, std::set<Node *> & frontierSet)
+AlignmentGraph::~AlignmentGraph()
{
- // TODO Return if frontierSet already contains entry for root
- // TODO Or maintain set of visited nodes?
-
- if (!spansIntersect(root->getComplementSpan(), closure(root->getSpan()))) {
- frontierSet.insert(root);
+ for (std::vector<Node *>::iterator p(m_sourceNodes.begin());
+ p != m_sourceNodes.end(); ++p) {
+ delete *p;
}
-
- const std::vector<Node *> & children = root->getChildren();
- for (std::vector<Node *>::const_iterator p(children.begin());
- p != children.end(); ++p) {
- computeFrontierSet(*p, frontierSet);
+ for (std::vector<Node *>::iterator p(m_targetNodes.begin());
+ p != m_targetNodes.end(); ++p) {
+ delete *p;
}
}
-void
-calcComplementSpans(Node * root)
+Subgraph AlignmentGraph::ComputeMinimalFrontierGraphFragment(
+ Node *root,
+ const std::set<Node *> &frontierSet)
{
- Span & compSpan = root->getComplementSpan();
+ std::stack<Node *> expandableNodes;
+ std::set<const Node *> expandedNodes;
- std::set<Node *> siblings;
-
- const std::vector<Node *> & parents = root->getParents();
- for (std::vector<Node *>::const_iterator p(parents.begin());
- p != parents.end(); ++p) {
- const Span & parentCompSpan = (*p)->getComplementSpan();
- compSpan.insert(parentCompSpan.begin(), parentCompSpan.end());
- const std::vector<Node *> & c = (*p)->getChildren();
- siblings.insert(c.begin(), c.end());
+ if (root->IsSink()) {
+ expandedNodes.insert(root);
+ } else {
+ expandableNodes.push(root);
}
- for (std::set<Node *>::iterator p(siblings.begin());
- p != siblings.end(); ++p) {
- if (*p == root) {
- continue;
- }
- const Span & siblingSpan = (*p)->getSpan();
- compSpan.insert(siblingSpan.begin(), siblingSpan.end());
- }
+ while (!expandableNodes.empty()) {
+ Node *n = expandableNodes.top();
+ expandableNodes.pop();
- const std::vector<Node *> & children = root->getChildren();
- for (std::vector<Node *>::const_iterator p(children.begin());
- p != children.end(); ++p) {
- calcComplementSpans(*p);
- }
-}
+ const std::vector<Node *> &children = n->GetChildren();
-void
-getTargetTreeLeaves(Node * root, std::vector<Node *> & leaves)
-{
- if (root->isSink()) {
- leaves.push_back(root);
- } else {
- const std::vector<Node *> & children = root->getChildren();
for (std::vector<Node *>::const_iterator p(children.begin());
p != children.end(); ++p) {
- getTargetTreeLeaves(*p, leaves);
+ Node *child = *p;
+ if (child->IsSink()) {
+ expandedNodes.insert(child);
+ continue;
+ }
+ std::set<Node *>::const_iterator q = frontierSet.find(child);
+ if (q == frontierSet.end()) { //child is not from the frontier set
+ expandableNodes.push(child);
+ } else if (child->GetType() == TARGET) { // still need source word
+ expandableNodes.push(child);
+ } else {
+ expandedNodes.insert(child);
+ }
}
}
+
+ return Subgraph(root, expandedNodes);
}
-bool
-partitionOrderComp(const Node * a, const Node * b)
+void AlignmentGraph::ExtractMinimalRules(const Options &options)
{
- const Span & aSpan = a->getSpan();
- const Span & bSpan = b->getSpan();
-
- assert(!aSpan.empty() && !bSpan.empty());
+ // Determine which nodes are frontier nodes.
+ std::set<Node *> frontierSet;
+ ComputeFrontierSet(m_root, options, frontierSet);
- return *(aSpan.begin()) < *(bSpan.begin());
+ // Form the minimal frontier graph fragment rooted at each frontier node.
+ std::vector<Subgraph> fragments;
+ fragments.reserve(frontierSet.size());
+ for (std::set<Node *>::iterator p(frontierSet.begin());
+ p != frontierSet.end(); ++p) {
+ Node *root = *p;
+ Subgraph fragment = ComputeMinimalFrontierGraphFragment(root, frontierSet);
+ assert(!fragment.IsTrivial());
+ // Can it form an SCFG rule?
+ // FIXME Does this exclude non-lexical unary rules?
+ if (root->GetType() == TREE && !root->GetSpan().empty()) {
+ root->AddRule(new Subgraph(fragment));
+ }
+ }
}
-Rule
-fragmentToRule(const Subgraph & fragment)
+void AlignmentGraph::ExtractComposedRules(const Options &options)
{
- // Source RHS
-
- std::set<Node *> sinkNodes(fragment.getSinkNodes());
+ ExtractComposedRules(m_root, options);
+}
- std::vector<Node *> sourceRHSNodes;
- for (std::set<Node *>::const_iterator p(sinkNodes.begin());
- p != sinkNodes.end(); ++p) {
- const Node & sinkNode = **p;
- if (!sinkNode.getSpan().empty()) {
- sourceRHSNodes.push_back(*p);
- }
+void AlignmentGraph::ExtractComposedRules(Node *node, const Options &options)
+{
+ // Extract composed rules for all children first.
+ const std::vector<Node *> &children = node->GetChildren();
+ for (std::vector<Node *>::const_iterator p(children.begin());
+ p != children.end(); ++p) {
+ ExtractComposedRules(*p, options);
}
- std::sort(sourceRHSNodes.begin(), sourceRHSNodes.end(),
- partitionOrderComp);
-
- // Build a mapping from target nodes to source-order indices, so that we
- // can construct the Alignment object later.
- std::map<const Node *, std::vector<int> > sourceOrder;
-
- std::vector<Symbol> sourceRHS;
- int srcIndex = 0;
- for (std::vector<Node *>::const_iterator p(sourceRHSNodes.begin());
- p != sourceRHSNodes.end(); ++p, ++srcIndex) {
- const Node & sinkNode = **p;
- if (sinkNode.getType() == TREE) {
- sourceRHS.push_back(Symbol("X", NonTerminal));
- sourceOrder[&sinkNode].push_back(srcIndex);
- } else {
- assert(sinkNode.getType() == SOURCE);
- sourceRHS.push_back(Symbol(sinkNode.getLabel(), Terminal));
- // Add all aligned target words to the sourceOrder map
- const std::vector<Node *> & parents(sinkNode.getParents());
- for (std::vector<Node *>::const_iterator q(parents.begin());
- q != parents.end(); ++q) {
- assert((*q)->getType() == TARGET);
- sourceOrder[*q].push_back(srcIndex);
- }
- }
+ // If there is no minimal rule for this node then there are no composed
+ // rules.
+ const std::vector<const Subgraph*> &rules = node->GetRules();
+ assert(rules.size() <= 1);
+ if (rules.empty()) {
+ return;
}
- // Target RHS + alignment
-
- std::vector<Symbol> targetRHS;
- Alignment alignment;
-
- std::vector<Node *> leafNodes(fragment.getLeafNodes());
-
- alignment.reserve(leafNodes.size()); // might be too much but that's OK
- targetRHS.reserve(leafNodes.size());
+ // Construct an initial composition candidate from the minimal rule.
+ ComposedRule cr(*(rules[0]));
+ if (!cr.GetOpenAttachmentPoint()) {
+ // No composition possible.
+ return;
+ }
- for (std::vector<Node *>::const_iterator p(leafNodes.begin());
- p != leafNodes.end(); ++p) {
- const Node & leaf = **p;
- if (leaf.getSpan().empty()) {
- // The node doesn't cover any source words, so we can only add
- // terminals to the target RHS (not a non-terminal).
- std::vector<std::string> targetWords(leaf.getTargetWords());
- for (std::vector<std::string>::const_iterator q(targetWords.begin());
- q != targetWords.end(); ++q) {
- targetRHS.push_back(Symbol(*q, Terminal));
- }
- } else {
- SymbolType type = (leaf.getType() == TREE) ? NonTerminal : Terminal;
- targetRHS.push_back(Symbol(leaf.getLabel(), type));
-
- int tgtIndex = targetRHS.size()-1;
- std::map<const Node *, std::vector<int> >::iterator q(sourceOrder.find(&leaf));
- assert(q != sourceOrder.end());
- std::vector<int> & sourceNodes = q->second;
- for (std::vector<int>::iterator r(sourceNodes.begin());
- r != sourceNodes.end(); ++r) {
- int srcIndex = *r;
- alignment.push_back(std::make_pair(srcIndex, tgtIndex));
+ std::queue<ComposedRule> queue;
+ queue.push(cr);
+ while (!queue.empty()) {
+ ComposedRule cr = queue.front();
+ queue.pop();
+ const Node *attachmentPoint = cr.GetOpenAttachmentPoint();
+ assert(attachmentPoint);
+ assert(attachmentPoint != node);
+ // Create all possible rules by composing this node's minimal rule with the
+ // existing rules (both minimal and composed) rooted at the first open
+ // attachment point.
+ const std::vector<const Subgraph*> &rules = attachmentPoint->GetRules();
+ for (std::vector<const Subgraph*>::const_iterator p = rules.begin();
+ p != rules.end(); ++p) {
+ assert((*p)->GetRoot()->GetType() == TREE);
+ ComposedRule *cr2 = cr.AttemptComposition(**p, options);
+ if (cr2) {
+ node->AddRule(new Subgraph(cr2->CreateSubgraph()));
+ if (cr2->GetOpenAttachmentPoint()) {
+ queue.push(*cr2);
+ }
+ delete cr2;
}
}
+ // Done with this attachment point. Advance to the next, if any.
+ cr.CloseAttachmentPoint();
+ if (cr.GetOpenAttachmentPoint()) {
+ queue.push(cr);
+ }
}
+}
- assert(!alignment.empty());
+Node *AlignmentGraph::CopyParseTree(const ParseTree *root)
+{
+ NodeType nodeType = (root->IsLeaf()) ? TARGET : TREE;
- // Source LHS
- Symbol sourceLHS("X", NonTerminal);
+ std::auto_ptr<Node> n(new Node(root->GetLabel(), nodeType));
- // Target LHS
- Symbol targetLHS(fragment.getRoot()->getLabel(), NonTerminal);
+ const std::vector<ParseTree *> &children = root->GetChildren();
+ std::vector<Node *> childNodes;
+ childNodes.reserve(children.size());
+ for (std::vector<ParseTree *>::const_iterator p(children.begin());
+ p != children.end(); ++p) {
+ Node *child = CopyParseTree(*p);
+ child->AddParent(n.get());
+ childNodes.push_back(child);
+ }
+ n->SetChildren(childNodes);
- return Rule(sourceLHS, targetLHS, sourceRHS, targetRHS, alignment);
-}
+ Node *p = n.release();
+ m_targetNodes.push_back(p);
+ return p;
}
-void
-Node::setChildren(const std::vector<Node*> & children)
+// Finds the set of frontier nodes. The definition of a frontier node differs
+// from Galley et al.'s (2004) in the following ways:
+//
+// 1. A node with an empty span is not a frontier node (this excludes
+// unaligned target subtrees).
+// 2. Target word nodes are not frontier nodes.
+// 3. Source word nodes are not frontier nodes.
+// 4. Unless the --AllowUnary option is used, a node is not a frontier node if
+// it has the same span as its parent.
+void AlignmentGraph::ComputeFrontierSet(Node *root,
+ const Options &options,
+ std::set<Node *> &frontierSet) const
{
- m_children = children;
-}
+ // Don't include word nodes or unaligned target subtrees.
+ if (root->GetType() != TREE || root->GetSpan().empty()) {
+ return;
+ }
-void
-Node::setParents(const std::vector<Node*> & parents)
-{
- m_parents = parents;
-}
+ if (!SpansIntersect(root->GetComplementSpan(), Closure(root->GetSpan()))) {
+    // Unless unary rules are explicitly allowed, we use Chung et al.'s (2011)
+    // modified definition of a frontier node to eliminate the production of
+ // non-lexical unary rules.
+ assert(root->GetParents().size() <= 1);
+ if (options.allowUnary
+ || root->GetParents().empty()
+ || root->GetParents()[0]->GetSpan() != root->GetSpan()) {
+ frontierSet.insert(root);
+ }
+ }
-void
-Node::addChild(Node * child)
-{
- m_children.push_back(child);
+ const std::vector<Node *> &children = root->GetChildren();
+ for (std::vector<Node *>::const_iterator p(children.begin());
+ p != children.end(); ++p) {
+ ComputeFrontierSet(*p, options, frontierSet);
+ }
}
-void
-Node::addParent(Node * parent)
+void AlignmentGraph::CalcComplementSpans(Node *root)
{
- m_parents.push_back(parent);
-}
+ Span compSpan;
+ std::set<Node *> siblings;
-bool
-Node::isSink() const
-{
- return m_children.empty();
-}
+ const std::vector<Node *> &parents = root->GetParents();
+ for (std::vector<Node *>::const_iterator p(parents.begin());
+ p != parents.end(); ++p) {
+ const Span &parentCompSpan = (*p)->GetComplementSpan();
+ compSpan.insert(parentCompSpan.begin(), parentCompSpan.end());
+ const std::vector<Node *> &c = (*p)->GetChildren();
+ siblings.insert(c.begin(), c.end());
+ }
-void
-Node::propagateIndex(int index)
-{
- m_span.insert(index);
- for (std::vector<Node *>::const_iterator p(m_parents.begin());
- p != m_parents.end(); ++p) {
- (*p)->propagateIndex(index);
+ for (std::set<Node *>::iterator p(siblings.begin());
+ p != siblings.end(); ++p) {
+ if (*p == root) {
+ continue;
+ }
+ const Span &siblingSpan = (*p)->GetSpan();
+ compSpan.insert(siblingSpan.begin(), siblingSpan.end());
}
-}
-std::vector<std::string>
-Node::getTargetWords() const
-{
- std::vector<std::string> targetWords;
- getTargetWords(targetWords);
- return targetWords;
+ root->SetComplementSpan(compSpan);
+
+ const std::vector<Node *> &children = root->GetChildren();
+ for (std::vector<Node *>::const_iterator p(children.begin());
+ p != children.end(); ++p) {
+ CalcComplementSpans(*p);
+ }
}
-void
-Node::getTargetWords(std::vector<std::string> & targetWords) const
+void AlignmentGraph::GetTargetTreeLeaves(Node *root,
+ std::vector<Node *> &leaves)
{
- if (m_type == TARGET) {
- targetWords.push_back(m_label);
+ if (root->IsSink()) {
+ leaves.push_back(root);
} else {
- for (std::vector<Node *>::const_iterator p(m_children.begin());
- p != m_children.end(); ++p) {
- (*p)->getTargetWords(targetWords);
+ const std::vector<Node *> &children = root->GetChildren();
+ for (std::vector<Node *>::const_iterator p(children.begin());
+ p != children.end(); ++p) {
+ GetTargetTreeLeaves(*p, leaves);
}
}
}
-AlignmentGraph::AlignmentGraph(const ParseTree * t,
- const std::vector<std::string> & s,
- const Alignment & a)
+void AlignmentGraph::AttachUnalignedSourceWords()
{
- m_root = copyParseTree(t, m_targetNodes);
-
- m_sourceNodes.reserve(s.size());
- for (std::vector<std::string>::const_iterator p(s.begin());
- p != s.end(); ++p) {
- m_sourceNodes.push_back(new Node(*p, SOURCE));
+ // Find the unaligned source words (if any).
+ std::set<int> unaligned;
+ for (int i = 0; i < m_sourceNodes.size(); ++i) {
+ const Node &sourceNode = (*m_sourceNodes[i]);
+ if (sourceNode.GetParents().empty()) {
+ unaligned.insert(i);
+ }
}
- std::vector<Node *> targetTreeLeaves;
- getTargetTreeLeaves(m_root, targetTreeLeaves);
-
- for (Alignment::const_iterator p(a.begin()); p != a.end(); ++p) {
- Node * src = m_sourceNodes[p->first];
- Node * tgt = targetTreeLeaves[p->second];
- src->addParent(tgt);
- tgt->addChild(src);
+ // Determine the attachment point for each one and attach it.
+ for (std::set<int>::iterator p = unaligned.begin();
+ p != unaligned.end(); ++p) {
+ int index = *p;
+ Node *attachmentPoint = DetermineAttachmentPoint(index);
+ Node *sourceNode = m_sourceNodes[index];
+ attachmentPoint->AddChild(sourceNode);
+ sourceNode->AddParent(attachmentPoint);
}
}
-AlignmentGraph::~AlignmentGraph()
+Node *AlignmentGraph::DetermineAttachmentPoint(int index)
{
- for (std::vector<Node *>::iterator p(m_sourceNodes.begin());
- p != m_sourceNodes.end(); ++p) {
- delete *p;
- }
- for (std::vector<Node *>::iterator p(m_targetNodes.begin());
- p != m_targetNodes.end(); ++p) {
- delete *p;
+ // Find the nearest aligned neighbour to the left, if any.
+ int i = index;
+ while (--i >= 0) {
+ if (!m_sourceNodes[i]->GetParents().empty()) {
+ break;
+ }
}
-}
-
-std::vector<Rule>
-AlignmentGraph::inferRules() const
-{
- size_t i = 0;
- std::vector<Node *>::const_iterator p(m_sourceNodes.begin());
- for (; p != m_sourceNodes.end(); ++p, ++i) {
- (*p)->propagateIndex(i);
+ // No aligned neighbours to the left, so attach to the root.
+ if (i == -1) {
+ return m_root;
}
-
- calcComplementSpans(m_root);
-
- std::set<Node *> frontierSet;
- computeFrontierSet(m_root, frontierSet);
-
- std::vector<Subgraph> fragments;
- for (std::set<Node *>::iterator p(frontierSet.begin());
- p != frontierSet.end(); ++p) {
- Subgraph subgraph(*p);
- while (!subgraph.expand(frontierSet)) {
- ;
- }
- if (subgraph.canFormSCFGRule()) {
- fragments.push_back(subgraph);
+ // Find the nearest aligned neighbour to the right, if any.
+ int j = index;
+ while (++j < m_sourceNodes.size()) {
+ if (!m_sourceNodes[j]->GetParents().empty()) {
+ break;
}
}
-
- std::vector<Rule> rules;
- for (std::vector<Subgraph>::const_iterator p = fragments.begin();
- p != fragments.end(); ++p) {
- rules.push_back(fragmentToRule(*p));
+ // No aligned neighbours to the right, so attach to the root.
+ if (j == m_sourceNodes.size()) {
+ return m_root;
}
-
- return rules;
+ // Construct the set of target nodes that are aligned to the left and right
+ // neighbours.
+ const std::vector<Node *> &leftParents = m_sourceNodes[i]->GetParents();
+ assert(!leftParents.empty());
+ const std::vector<Node *> &rightParents = m_sourceNodes[j]->GetParents();
+ assert(!rightParents.empty());
+ std::set<Node *> targetSet;
+ targetSet.insert(leftParents.begin(), leftParents.end());
+ targetSet.insert(rightParents.begin(), rightParents.end());
+ // The attachment point is the lowest common ancestor of the target word
+ // nodes, unless the LCA is itself a target word, in which case the LCA
+  // is the parent. This is to avoid introducing new word alignments.
+ // It assumes that the parse tree uses preterminals for parts of speech.
+ Node *lca = Node::LowestCommonAncestor(targetSet.begin(), targetSet.end());
+ if (lca->GetType() == TARGET) {
+ assert(lca->GetParents().size() == 1);
+ return lca->GetParents()[0];
+ }
+ return lca;
}
+
+} // namespace GHKM
+} // namespace Moses
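The frontier-set computation above hinges on two span operations: the closure of a node's span and the intersection test against its complement span. Below is a self-contained sketch of that test; it treats spans as plain std::set<int> and reimplements Closure and SpansIntersect with the semantics implied here (the real helpers live in Span.h), so the bodies are assumptions of this sketch rather than the Moses implementations.

#include <algorithm>
#include <iterator>
#include <set>

typedef std::set<int> Span;

// Closure: the contiguous range from the smallest to the largest index in the span.
Span Closure(const Span &s)
{
  Span c;
  if (!s.empty()) {
    for (int i = *s.begin(); i <= *s.rbegin(); ++i) {
      c.insert(i);
    }
  }
  return c;
}

bool SpansIntersect(const Span &a, const Span &b)
{
  Span both;
  std::set_intersection(a.begin(), a.end(), b.begin(), b.end(),
                        std::inserter(both, both.begin()));
  return !both.empty();
}

// A tree node can only be a frontier node when no index in its complement span
// falls inside the gap-free closure of its own span; conditions 1-4 in the
// comment above add the further restrictions on node type and unary parents.
bool IsFrontierCandidate(const Span &span, const Span &complementSpan)
{
  return !span.empty() && !SpansIntersect(complementSpan, Closure(span));
}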
diff --git a/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.h b/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.h
index 32698d5c3..94948758a 100644
--- a/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.h
+++ b/scripts/training/phrase-extract/extract-ghkm/AlignmentGraph.h
@@ -1,154 +1,76 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef ALIGNMENTGRAPH_H_INCLUDED_
-#define ALIGNMENTGRAPH_H_INCLUDED_
+#ifndef EXTRACT_GHKM_ALIGNMENT_GRAPH_H_
+#define EXTRACT_GHKM_ALIGNMENT_GRAPH_H_
#include "Alignment.h"
-#include "ParseTree.h"
-#include "Span.h"
-#include "Rule.h"
+#include "Options.h"
+#include <set>
#include <string>
#include <vector>
-enum NodeType { SOURCE, TARGET, TREE };
-
-class Node
-{
-public:
-
- Node(const std::string & label, NodeType type)
- : m_label(label)
- , m_type(type)
- , m_children()
- , m_parents()
- {}
-
- const std::string &
- getLabel() const {
- return m_label;
- }
-
- NodeType
- getType() const {
- return m_type;
- }
-
- const std::vector<Node*> &
- getChildren() const {
- return m_children;
- }
-
- const std::vector<Node*> &
- getParents() const {
- return m_parents;
- }
-
- void
- setChildren(const std::vector<Node*> &);
-
- void
- setParents(const std::vector<Node*> &);
-
- void
- addChild(Node *);
-
- void
- addParent(Node *);
-
- bool
- isSink() const;
-
- void
- propagateIndex(int);
+namespace Moses {
+namespace GHKM {
- Span &
- getSpan() {
- return m_span;
- }
-
- const Span &
- getSpan() const {
- return m_span;
- }
-
- Span &
- getComplementSpan() {
- return m_complementSpan;
- }
-
- const Span &
- getComplementSpan() const {
- return m_complementSpan;
- }
-
- std::vector<std::string>
- getTargetWords() const;
-
-private:
- std::string m_label;
- NodeType m_type;
- std::vector<Node*> m_children;
- std::vector<Node*> m_parents;
- Span m_span;
- Span m_complementSpan;
-
- // Disallow copying
- Node(const Node &);
- Node & operator=(const Node &);
-
- void
- getTargetWords(std::vector<std::string> &) const;
-};
+class Node;
+class ParseTree;
+class Subgraph;
class AlignmentGraph
{
-public:
+ public:
AlignmentGraph(const ParseTree *,
const std::vector<std::string> &,
const Alignment &);
~AlignmentGraph();
- Node *
- getRoot() {
- return m_root;
- }
-
- std::vector<Node *> &
- getSourceNodes() {
- return m_sourceNodes;
- }
+ Node *GetRoot() { return m_root; }
+ const std::vector<Node *> &GetTargetNodes() { return m_targetNodes; }
- std::vector<Rule>
- inferRules() const;
-
-private:
- Node * m_root;
- std::vector<Node *> m_sourceNodes;
- std::vector<Node *> m_targetNodes;
+ void ExtractMinimalRules(const Options &);
+ void ExtractComposedRules(const Options &);
+ private:
// Disallow copying
AlignmentGraph(const AlignmentGraph &);
- AlignmentGraph & operator=(const AlignmentGraph &);
+ AlignmentGraph &operator=(const AlignmentGraph &);
+
+ Node *CopyParseTree(const ParseTree *);
+ void ComputeFrontierSet(Node *, const Options &, std::set<Node *> &) const;
+ void CalcComplementSpans(Node *);
+ void GetTargetTreeLeaves(Node *, std::vector<Node *> &);
+ void AttachUnalignedSourceWords();
+ Node *DetermineAttachmentPoint(int);
+ Subgraph ComputeMinimalFrontierGraphFragment(Node *,
+ const std::set<Node *> &);
+ void ExtractComposedRules(Node *, const Options &);
+
+ Node *m_root;
+ std::vector<Node *> m_sourceNodes;
+ std::vector<Node *> m_targetNodes;
};
+} // namespace GHKM
+} // namespace Moses
+
#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/ComposedRule.cpp b/scripts/training/phrase-extract/extract-ghkm/ComposedRule.cpp
new file mode 100644
index 000000000..7a7fba106
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ComposedRule.cpp
@@ -0,0 +1,129 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#include "ComposedRule.h"
+
+#include "Node.h"
+#include "Options.h"
+#include "Subgraph.h"
+
+#include <set>
+#include <vector>
+#include <queue>
+
+namespace Moses {
+namespace GHKM {
+
+ComposedRule::ComposedRule(const Subgraph &baseRule)
+ : m_baseRule(baseRule)
+ , m_depth(baseRule.GetDepth())
+ , m_size(baseRule.GetSize())
+ , m_nodeCount(baseRule.GetNodeCount())
+{
+ const std::set<const Node *> &leaves = baseRule.GetLeaves();
+ for (std::set<const Node *>::const_iterator p = leaves.begin();
+ p != leaves.end(); ++p) {
+ if ((*p)->GetType() == TREE) {
+ m_openAttachmentPoints.push(*p);
+ }
+ }
+}
+
+ComposedRule::ComposedRule(const ComposedRule &other, const Subgraph &rule,
+ int depth)
+ : m_baseRule(other.m_baseRule)
+ , m_attachedRules(other.m_attachedRules)
+ , m_openAttachmentPoints(other.m_openAttachmentPoints)
+ , m_depth(depth)
+ , m_size(other.m_size+rule.GetSize())
+ , m_nodeCount(other.m_nodeCount+rule.GetNodeCount()-1)
+{
+ m_attachedRules.push_back(&rule);
+ m_openAttachmentPoints.pop();
+}
+
+const Node *ComposedRule::GetOpenAttachmentPoint()
+{
+ return m_openAttachmentPoints.empty() ? 0 : m_openAttachmentPoints.front();
+}
+
+void ComposedRule::CloseAttachmentPoint()
+{
+ assert(!m_openAttachmentPoints.empty());
+ m_attachedRules.push_back(0);
+ m_openAttachmentPoints.pop();
+}
+
+ComposedRule *ComposedRule::AttemptComposition(const Subgraph &rule,
+ const Options &options) const
+{
+ // The smallest possible rule fragment should be rooted at a tree node.
+ // Note that this differs from the original GHKM definition.
+ assert(rule.GetRoot()->GetType() == TREE);
+
+ // Check the node count of the proposed rule.
+ if (m_nodeCount+rule.GetNodeCount()-1 > options.maxNodes) {
+ return 0;
+ }
+
+ // Check the size of the proposed rule.
+ if (m_size+rule.GetSize() > options.maxRuleSize) {
+ return 0;
+ }
+
+ // Determine the depth of the proposed rule and test whether it exceeds the
+ // limit.
+ int attachmentPointDepth = 0;
+ const Node *n = rule.GetRoot();
+ while (n != m_baseRule.GetRoot()) {
+ assert(n->GetParents().size() == 1);
+ n = n->GetParents()[0];
+ ++attachmentPointDepth;
+ }
+ int newDepth = std::max(m_depth, attachmentPointDepth+rule.GetDepth());
+ if (newDepth > options.maxRuleDepth) {
+ return 0;
+ }
+
+ return new ComposedRule(*this, rule, newDepth);
+}
+
+Subgraph ComposedRule::CreateSubgraph()
+{
+ std::set<const Node *> leaves;
+ const std::set<const Node *> &baseLeaves = m_baseRule.GetLeaves();
+ int i = 0;
+ for (std::set<const Node *>::const_iterator p = baseLeaves.begin();
+ p != baseLeaves.end(); ++p) {
+ const Node *baseLeaf = *p;
+ if (baseLeaf->GetType() == TREE && i < m_attachedRules.size()) {
+ const Subgraph *attachedRule = m_attachedRules[i++];
+ if (attachedRule) {
+ leaves.insert(attachedRule->GetLeaves().begin(),
+ attachedRule->GetLeaves().end());
+ continue;
+ }
+ }
+ leaves.insert(baseLeaf);
+ }
+ return Subgraph(m_baseRule.GetRoot(), leaves);
+}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/ComposedRule.h b/scripts/training/phrase-extract/extract-ghkm/ComposedRule.h
new file mode 100644
index 000000000..34394c935
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ComposedRule.h
@@ -0,0 +1,70 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#pragma once
+#ifndef EXTRACT_GHKM_COMPOSED_RULE_H_
+#define EXTRACT_GHKM_COMPOSED_RULE_H_
+
+#include "Subgraph.h"
+
+#include <vector>
+#include <queue>
+
+namespace Moses {
+namespace GHKM {
+
+class Node;
+class Options;
+
+class ComposedRule
+{
+ public:
+ // Form a 'trivial' ComposedRule from a single existing rule.
+ ComposedRule(const Subgraph &baseRule);
+
+  // Returns the first open attachment point if any exist, or 0 otherwise.
+ const Node *GetOpenAttachmentPoint();
+
+ // Close the first open attachment point without attaching a rule.
+ void CloseAttachmentPoint();
+
+ // Attempts to produce a new composed rule by attaching a given rule at the
+ // first open attachment point. This will fail if the proposed rule violates
+ // the constraints set in the Options object, in which case the function
+ // returns 0.
+ ComposedRule *AttemptComposition(const Subgraph &, const Options &) const;
+
+ // Constructs a Subgraph object corresponding to the composed rule.
+ Subgraph CreateSubgraph();
+
+ private:
+ ComposedRule(const ComposedRule &, const Subgraph &, int);
+
+ const Subgraph &m_baseRule;
+ std::vector<const Subgraph *> m_attachedRules;
+ std::queue<const Node *> m_openAttachmentPoints;
+ int m_depth;
+ int m_nodeCount;
+ int m_size;
+};
+
+} // namespace GHKM
+} // namespace Moses
+
+#endif
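For reference, this interface is driven exactly as in AlignmentGraph::ExtractComposedRules earlier in the patch; the fragment below restates that breadth-first composition loop as a usage sketch (ComposeAt is an illustrative name, not a Moses function).

#include "ComposedRule.h"
#include "Node.h"
#include "Options.h"
#include "Subgraph.h"

#include <queue>
#include <vector>

using Moses::GHKM::ComposedRule;
using Moses::GHKM::Node;
using Moses::GHKM::Options;
using Moses::GHKM::Subgraph;

void ComposeAt(Node &node, const Subgraph &minimalRule, const Options &options)
{
  std::queue<ComposedRule> queue;
  queue.push(ComposedRule(minimalRule));  // the 'trivial' composed rule
  while (!queue.empty()) {
    ComposedRule cr = queue.front();
    queue.pop();
    const Node *attachmentPoint = cr.GetOpenAttachmentPoint();
    if (!attachmentPoint) {
      continue;  // every attachment point has been closed
    }
    // Attach each rule rooted at the open attachment point, keeping only
    // compositions that satisfy the size/depth/node-count limits in Options.
    const std::vector<const Subgraph *> &rules = attachmentPoint->GetRules();
    for (std::vector<const Subgraph *>::const_iterator p = rules.begin();
         p != rules.end(); ++p) {
      ComposedRule *cr2 = cr.AttemptComposition(**p, options);
      if (cr2) {
        node.AddRule(new Subgraph(cr2->CreateSubgraph()));
        if (cr2->GetOpenAttachmentPoint()) {
          queue.push(*cr2);
        }
        delete cr2;
      }
    }
    // Also consider leaving this attachment point unexpanded.
    cr.CloseAttachmentPoint();
    if (cr.GetOpenAttachmentPoint()) {
      queue.push(cr);
    }
  }
}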
diff --git a/scripts/training/phrase-extract/extract-ghkm/Exception.h b/scripts/training/phrase-extract/extract-ghkm/Exception.h
index 56c5f83f0..9928785f0 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Exception.h
+++ b/scripts/training/phrase-extract/extract-ghkm/Exception.h
@@ -1,46 +1,42 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef EXCEPTION_H_INCLUDED_
-#define EXCEPTION_H_INCLUDED_
+#ifndef EXTRACT_GHKM_EXCEPTION_H_
+#define EXTRACT_GHKM_EXCEPTION_H_
#include <string>
+namespace Moses {
+namespace GHKM {
+
class Exception
{
-public:
- Exception(const char * msg)
- : m_msg(msg)
- {}
-
- Exception(const std::string & msg)
- : m_msg(msg)
- {}
-
- const std::string &
- getMsg() const {
- return m_msg;
- }
-
-private:
+ public:
+ Exception(const char *msg) : m_msg(msg) {}
+ Exception(const std::string &msg) : m_msg(msg) {}
+ const std::string &GetMsg() const { return m_msg; }
+ private:
std::string m_msg;
};
+} // namespace GHKM
+} // namespace Moses
+
#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.cpp b/scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.cpp
new file mode 100644
index 000000000..dad326131
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.cpp
@@ -0,0 +1,476 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#include "ExtractGHKM.h"
+
+#include "Alignment.h"
+#include "AlignmentGraph.h"
+#include "Exception.h"
+#include "Node.h"
+#include "Options.h"
+#include "ParseTree.h"
+#include "ScfgRule.h"
+#include "ScfgRuleWriter.h"
+#include "Span.h"
+#include "XmlTreeParser.h"
+
+#include <boost/program_options.hpp>
+
+#include <cassert>
+#include <cstdlib>
+#include <fstream>
+#include <iostream>
+#include <iterator>
+#include <string>
+#include <sstream>
+#include <vector>
+
+namespace Moses {
+namespace GHKM {
+
+int ExtractGHKM::Main(int argc, char *argv[])
+{
+ // Process command-line options.
+ Options options;
+ ProcessOptions(argc, argv, options);
+
+ // Open input files.
+ std::ifstream targetStream;
+ std::ifstream sourceStream;
+ std::ifstream alignmentStream;
+ OpenInputFileOrDie(options.targetFile, targetStream);
+ OpenInputFileOrDie(options.sourceFile, sourceStream);
+ OpenInputFileOrDie(options.alignmentFile, alignmentStream);
+
+ // Open output files.
+ std::ofstream extractStream;
+ std::ofstream invExtractStream;
+ std::ofstream glueGrammarStream;
+ std::ofstream unknownWordStream;
+ std::string invExtractFileName = options.extractFile + std::string(".inv");
+ OpenOutputFileOrDie(options.extractFile, extractStream);
+ OpenOutputFileOrDie(invExtractFileName, invExtractStream);
+ if (!options.glueGrammarFile.empty()) {
+ OpenOutputFileOrDie(options.glueGrammarFile, glueGrammarStream);
+ }
+ if (!options.unknownWordFile.empty()) {
+ OpenOutputFileOrDie(options.unknownWordFile, unknownWordStream);
+ }
+
+ // Target label sets for producing glue grammar.
+ std::set<std::string> labelSet;
+ std::set<std::string> topLabelSet;
+
+ // Word count statistics for producing unknown word labels.
+ std::map<std::string, int> wordCount;
+ std::map<std::string, std::string> wordLabel;
+
+ std::string targetLine;
+ std::string sourceLine;
+ std::string alignmentLine;
+ ScfgRuleWriter writer(extractStream, invExtractStream, options);
+ size_t lineNum = 0;
+ while (true) {
+ std::getline(targetStream, targetLine);
+ std::getline(sourceStream, sourceLine);
+ std::getline(alignmentStream, alignmentLine);
+
+ if (targetStream.eof() && sourceStream.eof() && alignmentStream.eof()) {
+ break;
+ }
+
+ if (targetStream.eof() || sourceStream.eof() || alignmentStream.eof()) {
+      Error("Files must contain the same number of lines");
+ }
+
+ ++lineNum;
+
+ // Parse target tree.
+ std::auto_ptr<ParseTree> t(ParseXmlTree(targetLine));
+ if (!t.get()) {
+ std::ostringstream s;
+ s << "Failed to parse XML tree at line " << lineNum;
+ Error(s.str());
+ }
+
+ // Read source tokens.
+ std::vector<std::string> sourceTokens(ReadTokens(sourceLine));
+
+ // Read word alignments.
+ Alignment alignment;
+ try {
+ alignment = ReadAlignment(alignmentLine);
+ } catch (const Exception &e) {
+ std::ostringstream s;
+ s << "Failed to read alignment at line " << lineNum << ": ";
+ s << e.GetMsg();
+ Error(s.str());
+ }
+
+ // Record tree labels for use in glue grammar.
+ if (!options.glueGrammarFile.empty()) {
+ // Record labels that cover the full sentence to topLabelSet.
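+      // For example, for a target tree (TOP (S (NP ...) (VP ...))) this
+      // loop records TOP and S, walking down while the chain is unary.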
+ ParseTree *p = t.get();
+ topLabelSet.insert(p->GetLabel());
+ while (p->GetChildren().size() == 1) {
+ p = p->GetChildren()[0];
+ if (p->IsLeaf()) {
+ break;
+ }
+ topLabelSet.insert(p->GetLabel());
+ }
+ // Record all labels to labelSet.
+ RecordTreeLabels(*t, labelSet);
+ }
+
+ // Record word counts.
+ if (!options.unknownWordFile.empty()) {
+ CollectWordLabelCounts(*t, wordCount, wordLabel);
+ }
+
+ // Form an alignment graph from the target tree, source words, and
+ // alignment.
+ AlignmentGraph graph(t.get(), sourceTokens, alignment);
+
+ // Extract minimal rules, adding each rule to its root node's rule set.
+ graph.ExtractMinimalRules(options);
+
+ // Extract composed rules.
+ if (!options.minimal) {
+ graph.ExtractComposedRules(options);
+ }
+
+ // Write the rules, subject to scope pruning.
+ const std::vector<Node *> &targetNodes = graph.GetTargetNodes();
+ for (std::vector<Node *>::const_iterator p = targetNodes.begin();
+ p != targetNodes.end(); ++p) {
+ const std::vector<const Subgraph *> &rules = (*p)->GetRules();
+ for (std::vector<const Subgraph *>::const_iterator q = rules.begin();
+ q != rules.end(); ++q) {
+ ScfgRule r(**q);
+ // TODO Can scope pruning be done earlier?
+ if (r.Scope() <= options.maxScope) {
+ writer.Write(r);
+ }
+ }
+ }
+ }
+
+ if (!options.glueGrammarFile.empty()) {
+ WriteGlueGrammar(labelSet, topLabelSet, glueGrammarStream);
+ }
+
+ if (!options.unknownWordFile.empty()) {
+ WriteUnknownWordLabel(wordCount, wordLabel, unknownWordStream);
+ }
+
+ return 0;
+}
+
+void ExtractGHKM::OpenInputFileOrDie(const std::string &filename,
+ std::ifstream &stream)
+{
+ stream.open(filename.c_str());
+ if (!stream) {
+ std::ostringstream msg;
+ msg << "failed to open input file: " << filename;
+ Error(msg.str());
+ }
+}
+
+void ExtractGHKM::OpenOutputFileOrDie(const std::string &filename,
+ std::ofstream &stream)
+{
+ stream.open(filename.c_str());
+ if (!stream) {
+ std::ostringstream msg;
+ msg << "failed to open output file: " << filename;
+ Error(msg.str());
+ }
+}
+
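+// Parses the command line into an Options object.  A hypothetical invocation
+// (file names are for illustration only):
+//   extract-ghkm corpus.parsed.en corpus.fr corpus.align extract \
+//       --GlueGrammar glue.txt --UnknownWordLabel unknown-word-labels.txt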
+void ExtractGHKM::ProcessOptions(int argc, char *argv[],
+ Options &options) const
+{
+ namespace po = boost::program_options;
+ namespace cls = boost::program_options::command_line_style;
+
+ // Construct the 'top' of the usage message: the bit that comes before the
+ // options list.
+ std::ostringstream usageTop;
+ usageTop << "Usage: " << GetName()
+ << " [OPTION]... TARGET SOURCE ALIGNMENT EXTRACT\n\n"
+ << "SCFG rule extractor based on the GHKM algorithm described in\n"
+ << "Galley et al. (2004).\n\n"
+ << "Options";
+
+ // Construct the 'bottom' of the usage message.
+ std::ostringstream usageBottom;
+ usageBottom << "\nImplementation Notes:\n"
+ << "\nThe parse tree is assumed to contain part-of-speech preterminal nodes.\n"
+ << "\n"
+              << "For the composed rule constraints: rule depth is the maximum distance from the\nrule's root node to a sink node, not counting preterminal expansions or word\nalignments. Rule size is the measure defined in DeNeefe et al. (2007): the\nnumber of non-part-of-speech, non-leaf constituent labels in the target tree.\nNode count is the number of target tree nodes (excluding target words).\n"
+ << "\n"
+ << "Scope pruning (Hopkins and Langmead, 2010) is applied to both minimal and\ncomposed rules.\n"
+ << "\n"
+ << "Unaligned source words are attached to the tree using the following heuristic:\nif there are aligned source words to both the left and the right of an unaligned\nsource word then it is attached to the lowest common ancestor of its nearest\nsuch left and right neighbours. Otherwise, it is attached to the root of the\nparse tree.\n"
+ << "\n"
+ << "Unless the --AllowUnary option is given, unary rules containing no lexical\nsource items are eliminated using the method described in Chung et al. (2011).\nThe parsing algorithm used in Moses is unable to handle such rules.\n"
+ << "\n"
+ << "References:\n"
+ << "Galley, M., Hopkins, M., Knight, K., and Marcu, D. (2004)\n"
+ << "\"What's in a Translation Rule?\", In Proceedings of HLT/NAACL 2004.\n"
+ << "\n"
+ << "DeNeefe, S., Knight, K., Wang, W., and Marcu, D. (2007)\n"
+ << "\"What Can Syntax-Based MT Learn from Phrase-Based MT?\", In Proceedings of\nEMNLP-CoNLL 2007.\n"
+ << "\n"
+ << "Hopkins, M. and Langmead, G. (2010)\n"
+ << "\"SCFG Decoding Without Binarization\", In Proceedings of EMNLP 2010.\n"
+ << "\n"
+              << "Chung, T., Fang, L., and Gildea, D. (2011)\n"
+ << "\"Issues Concerning Decoding with Synchronous Context-free Grammar\", In\nProceedings of ACL/HLT 2011.";
+
+ // Declare the command line options that are visible to the user.
+ po::options_description visible(usageTop.str());
+ visible.add_options()
+    ("help", "print this help message and exit")
+ ("AllowUnary",
+ "allow fully non-lexical unary rules")
+ ("GlueGrammar",
+ po::value(&options.glueGrammarFile),
+ "write glue grammar to named file")
+ ("MaxNodes",
+ po::value(&options.maxNodes)->default_value(options.maxNodes),
+ "set maximum number of tree nodes for composed rules")
+ ("MaxRuleDepth",
+ po::value(&options.maxRuleDepth)->default_value(options.maxRuleDepth),
+ "set maximum depth for composed rules")
+ ("MaxRuleSize",
+ po::value(&options.maxRuleSize)->default_value(options.maxRuleSize),
+ "set maximum size for composed rules")
+ ("MaxScope",
+ po::value(&options.maxScope)->default_value(options.maxScope),
+ "set maximum allowed scope")
+ ("Minimal",
+ "extract minimal rules only")
+ ("UnknownWordLabel",
+ po::value(&options.unknownWordFile),
+ "write unknown word labels to named file")
+ ("UnpairedExtractFormat",
+ "do not pair non-terminals in extract files")
+ ;
+
+ // Declare the command line options that are hidden from the user
+ // (these are used as positional options).
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("TargetFile",
+ po::value(&options.targetFile),
+ "target file")
+ ("SourceFile",
+ po::value(&options.sourceFile),
+ "source file")
+ ("AlignmentFile",
+ po::value(&options.alignmentFile),
+ "alignment file")
+ ("ExtractFile",
+ po::value(&options.extractFile),
+ "extract file")
+ ;
+
+ // Compose the full set of command-line options.
+ po::options_description cmdLineOptions;
+ cmdLineOptions.add(visible).add(hidden);
+
+ // Register the positional options.
+ po::positional_options_description p;
+ p.add("TargetFile", 1);
+ p.add("SourceFile", 1);
+ p.add("AlignmentFile", 1);
+ p.add("ExtractFile", 1);
+
+ // Process the command-line.
+ po::variables_map vm;
+ const int optionStyle = cls::allow_long
+ | cls::long_allow_adjacent
+ | cls::long_allow_next;
+ try {
+ po::store(po::command_line_parser(argc, argv).style(optionStyle).
+ options(cmdLineOptions).positional(p).run(), vm);
+ po::notify(vm);
+ } catch (const std::exception &e) {
+ std::ostringstream msg;
+ msg << e.what() << "\n\n" << visible << usageBottom.str();
+ Error(msg.str());
+ }
+
+ if (vm.count("help")) {
+ std::cout << visible << usageBottom.str() << std::endl;
+ std::exit(0);
+ }
+
+ // Check all positional options were given.
+ if (!vm.count("TargetFile") ||
+ !vm.count("SourceFile") ||
+ !vm.count("AlignmentFile") ||
+ !vm.count("ExtractFile")) {
+    std::cerr << visible << usageBottom.str() << std::endl;
+ std::exit(1);
+ }
+
+ // Process Boolean options.
+ if (vm.count("AllowUnary")) {
+ options.allowUnary = true;
+ }
+ if (vm.count("Minimal")) {
+ options.minimal = true;
+ }
+ if (vm.count("UnpairedExtractFormat")) {
+ options.unpairedExtractFormat = true;
+ }
+}
+
+void ExtractGHKM::Error(const std::string &msg) const
+{
+ std::cerr << GetName() << ": " << msg << std::endl;
+ std::exit(1);
+}
+
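+// Splits a line of text on spaces and tabs.  For example, "der kleine Hund"
+// yields {"der", "kleine", "Hund"}.  The line is assumed to contain at least
+// one token (see the assert below).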
+std::vector<std::string> ExtractGHKM::ReadTokens(const std::string &s)
+{
+ std::vector<std::string> tokens;
+
+ std::string whitespace = " \t";
+
+ std::string::size_type begin = s.find_first_not_of(whitespace);
+ assert(begin != std::string::npos);
+ while (true) {
+ std::string::size_type end = s.find_first_of(whitespace, begin);
+ std::string token;
+ if (end == std::string::npos) {
+ token = s.substr(begin);
+ } else {
+ token = s.substr(begin, end-begin);
+ }
+ tokens.push_back(token);
+ if (end == std::string::npos) {
+ break;
+ }
+ begin = s.find_first_not_of(whitespace, end);
+ if (begin == std::string::npos) {
+ break;
+ }
+ }
+
+ return tokens;
+}
+
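+// Writes the glue grammar in the Moses SCFG rule-table format.  As an
+// illustrative example (labels are made up): with top label "Q" and recorded
+// label "NP", the glue rule emitted below is
+//   [X][Q] [X][NP] [X] ||| [X][Q] [X][NP] [Q] ||| 2.718 ||| 0-0 1-1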
+void ExtractGHKM::WriteGlueGrammar(const std::set<std::string> &labelSet,
+ const std::set<std::string> &topLabelSet,
+ std::ostream &out)
+{
+  // Choose a top label that is not already used as a tree label.
+ std::string topLabel = "QQQQQQ";
+  for (std::string::size_type i = 1; i <= topLabel.length(); i++) {
+ if (labelSet.find(topLabel.substr(0,i)) == labelSet.end() ) {
+ topLabel = topLabel.substr(0,i);
+ break;
+ }
+ }
+
+ // basic rules
+ out << "<s> [X] ||| <s> [" << topLabel << "] ||| 1 ||| " << std::endl;
+ out << "[X][" << topLabel << "] </s> [X] ||| [X][" << topLabel << "] </s> [" << topLabel << "] ||| 1 ||| 0-0 " << std::endl;
+
+ // top rules
+ for (std::set<std::string>::const_iterator i = topLabelSet.begin();
+ i != topLabelSet.end(); ++i) {
+ out << "<s> [X][" << *i << "] </s> [X] ||| <s> [X][" << *i << "] </s> [" << topLabel << "] ||| 1 ||| 1-1" << std::endl;
+ }
+
+ // glue rules
+ for(std::set<std::string>::const_iterator i = labelSet.begin();
+ i != labelSet.end(); i++ ) {
+ out << "[X][" << topLabel << "] [X][" << *i << "] [X] ||| [X][" << topLabel << "] [X][" << *i << "] [" << topLabel << "] ||| 2.718 ||| 0-0 1-1" << std::endl;
+ }
+ // glue rule for unknown word...
+ out << "[X][" << topLabel << "] [X][X] [X] ||| [X][" << topLabel << "] [X][X] [" << topLabel << "] ||| 2.718 ||| 0-0 1-1 " << std::endl;
+}
+
+void ExtractGHKM::RecordTreeLabels(const ParseTree &t,
+ std::set<std::string> &labelSet)
+{
+ labelSet.insert(t.GetLabel());
+ const std::vector<ParseTree *> &children = t.GetChildren();
+ for (std::vector<ParseTree *>::const_iterator p = children.begin();
+ p != children.end(); ++p) {
+ const ParseTree &child = **p;
+ if (!child.IsLeaf()) {
+ RecordTreeLabels(child, labelSet);
+ }
+ }
+}
+
+void ExtractGHKM::CollectWordLabelCounts(
+ ParseTree &root,
+ std::map<std::string, int> &wordCount,
+ std::map<std::string, std::string> &wordLabel)
+{
+ std::vector<const ParseTree*> leaves;
+ root.GetLeaves(std::back_inserter(leaves));
+ for (std::vector<const ParseTree *>::const_iterator p = leaves.begin();
+ p != leaves.end(); ++p) {
+ const ParseTree &leaf = **p;
+ const std::string &word = leaf.GetLabel();
+ const std::string &label = leaf.GetParent()->GetLabel();
+ ++wordCount[word];
+ wordLabel[word] = label;
+ }
+}
+
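+// Estimates unknown-word labels from singleton words.  Rough illustrative
+// example (numbers are made up): if 100 words occur exactly once and 40 of
+// them sit under an NN preterminal, the line "NN 0.4" is written; a label
+// covering only 2 singletons (ratio 0.02) falls below the 0.03 cut-off and
+// is dropped.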
+void ExtractGHKM::WriteUnknownWordLabel(
+ const std::map<std::string, int> &wordCount,
+ const std::map<std::string, std::string> &wordLabel,
+ std::ostream &out)
+{
+ std::map<std::string, int> labelCount;
+ int total = 0;
+ for (std::map<std::string, int>::const_iterator p = wordCount.begin();
+ p != wordCount.end(); ++p) {
+ // Only consider singletons.
+ if (p->second == 1) {
+ std::map<std::string, std::string>::const_iterator q =
+ wordLabel.find(p->first);
+ assert(q != wordLabel.end());
+ ++labelCount[q->second];
+ ++total;
+ }
+ }
+ for (std::map<std::string, int>::const_iterator p = labelCount.begin();
+ p != labelCount.end(); ++p) {
+ double ratio = static_cast<double>(p->second) / static_cast<double>(total);
+ if (ratio > 0.03) {
+ out << p->first << " " << ratio << std::endl;
+ }
+ }
+}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.h b/scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.h
new file mode 100644
index 000000000..4c06c2646
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ExtractGHKM.h
@@ -0,0 +1,67 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#pragma once
+#ifndef EXTRACT_GHKM_EXTRACT_GHKM_H_
+#define EXTRACT_GHKM_EXTRACT_GHKM_H_
+
+#include <iosfwd>
+#include <map>
+#include <ostream>
+#include <set>
+#include <string>
+#include <vector>
+
+namespace Moses {
+namespace GHKM {
+
+class Options;
+class ParseTree;
+
+class ExtractGHKM
+{
+ public:
+ ExtractGHKM() : m_name("extract-ghkm") {}
+ const std::string &GetName() const { return m_name; }
+ int Main(int argc, char *argv[]);
+ private:
+ void Error(const std::string &) const;
+ void OpenInputFileOrDie(const std::string &, std::ifstream &);
+ void OpenOutputFileOrDie(const std::string &, std::ofstream &);
+ void RecordTreeLabels(const ParseTree &, std::set<std::string> &);
+ void CollectWordLabelCounts(ParseTree &,
+ std::map<std::string, int> &,
+ std::map<std::string, std::string> &);
+ void WriteUnknownWordLabel(
+ const std::map<std::string, int> &,
+ const std::map<std::string, std::string> &,
+ std::ostream &);
+ void WriteGlueGrammar(const std::set<std::string> &,
+ const std::set<std::string> &,
+ std::ostream &);
+ std::vector<std::string> ReadTokens(const std::string &);
+
+ void ProcessOptions(int, char *[], Options &) const;
+
+ std::string m_name;
+};
+
+} // namespace GHKM
+} // namespace Moses
+
+#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/Jamfile b/scripts/training/phrase-extract/extract-ghkm/Jamfile
new file mode 100644
index 000000000..860ceba31
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/Jamfile
@@ -0,0 +1,3 @@
+exe extract-ghkm : [ glob *.cpp ] ..//trees ../../../..//boost_program_options ;
+
+install tools : extract-ghkm : <install-type>EXE ;
diff --git a/scripts/training/phrase-extract/extract-ghkm/Main.cpp b/scripts/training/phrase-extract/extract-ghkm/Main.cpp
new file mode 100644
index 000000000..faf3230a6
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/Main.cpp
@@ -0,0 +1,26 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#include "ExtractGHKM.h"
+
+int main(int argc, char *argv[])
+{
+ Moses::GHKM::ExtractGHKM tool;
+ return tool.Main(argc, argv);
+}
diff --git a/scripts/training/phrase-extract/extract-ghkm/Makefile b/scripts/training/phrase-extract/extract-ghkm/Makefile
deleted file mode 100644
index 23c0ff104..000000000
--- a/scripts/training/phrase-extract/extract-ghkm/Makefile
+++ /dev/null
@@ -1,34 +0,0 @@
-.DELETE_ON_ERROR:
-.PHONY: all clean
-
-CC = g++
-CFLAGS = -pedantic -Wall -O -g
-CPPFLAGS = -I ../
-
-PROG = extract-ghkm
-SOURCES = $(addsuffix .cpp,$(PROG)) \
- Alignment.cpp \
- AlignmentGraph.cpp \
- ParseTree.cpp \
- Span.cpp \
- Subgraph.cpp \
- ../SyntaxTree.cpp \
- ../XmlTree.cpp \
- XmlTreeParser.cpp
-OBJS = $(notdir $(SOURCES:.cpp=.o))
-
-all: $(PROG)
-
-clean:
- @rm -f $(PROG) $(OBJS)
-
-$(PROG): $(OBJS)
- $(CC) $(CPPFLAGS) $(CFLAGS) $^ $(LDFLAGS) -o $@
-
-include Makefile.dep
-
-Makefile.dep: $(SOURCES)
- $(CC) $(CPPFLAGS) -MM $(SOURCES) > $@
-
-%.o:
- $(CC) -c $(CPPFLAGS) $(CFLAGS) $<
diff --git a/scripts/training/phrase-extract/extract-ghkm/Makefile.dep b/scripts/training/phrase-extract/extract-ghkm/Makefile.dep
deleted file mode 100644
index 473d02e06..000000000
--- a/scripts/training/phrase-extract/extract-ghkm/Makefile.dep
+++ /dev/null
@@ -1,13 +0,0 @@
-extract-ghkm.o: extract-ghkm.cpp Alignment.h AlignmentGraph.h ParseTree.h \
- Span.h Rule.h Exception.h XmlTreeParser.h
-Alignment.o: Alignment.cpp Alignment.h Exception.h
-AlignmentGraph.o: AlignmentGraph.cpp AlignmentGraph.h Alignment.h \
- ParseTree.h Span.h Rule.h Subgraph.h
-ParseTree.o: ParseTree.cpp ParseTree.h
-Span.o: Span.cpp Span.h
-Subgraph.o: Subgraph.cpp Subgraph.h AlignmentGraph.h Alignment.h \
- ParseTree.h Span.h Rule.h
-SyntaxTree.o: ../SyntaxTree.cpp ../SyntaxTree.h
-XmlTree.o: ../XmlTree.cpp ../SyntaxTree.h
-XmlTreeParser.o: XmlTreeParser.cpp XmlTreeParser.h ParseTree.h \
- ../XmlTree.h ../SyntaxTree.h
diff --git a/scripts/training/phrase-extract/extract-ghkm/Node.cpp b/scripts/training/phrase-extract/extract-ghkm/Node.cpp
new file mode 100644
index 000000000..beb7470b8
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/Node.cpp
@@ -0,0 +1,71 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#include "Node.h"
+
+#include "Subgraph.h"
+
+namespace Moses {
+namespace GHKM {
+
+Node::~Node()
+{
+ for (std::vector<const Subgraph*>::const_iterator p(m_rules.begin());
+ p != m_rules.end(); ++p) {
+ delete *p;
+ }
+}
+
+bool Node::IsPreterminal() const
+{
+ return (m_type == TREE
+ && m_children.size() == 1
+ && m_children[0]->m_type == TARGET);
+}
+
+void Node::PropagateIndex(int index)
+{
+ m_span.insert(index);
+ for (std::vector<Node *>::const_iterator p(m_parents.begin());
+ p != m_parents.end(); ++p) {
+ (*p)->PropagateIndex(index);
+ }
+}
+
+std::vector<std::string> Node::GetTargetWords() const
+{
+ std::vector<std::string> targetWords;
+ GetTargetWords(targetWords);
+ return targetWords;
+}
+
+void Node::GetTargetWords(std::vector<std::string> &targetWords) const
+{
+ if (m_type == TARGET) {
+ targetWords.push_back(m_label);
+ } else {
+ for (std::vector<Node *>::const_iterator p(m_children.begin());
+ p != m_children.end(); ++p) {
+ (*p)->GetTargetWords(targetWords);
+ }
+ }
+}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/Node.h b/scripts/training/phrase-extract/extract-ghkm/Node.h
new file mode 100644
index 000000000..228fdc812
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/Node.h
@@ -0,0 +1,182 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#pragma once
+#ifndef EXTRACT_GHKM_NODE_H_
+#define EXTRACT_GHKM_NODE_H_
+
+#include "Span.h"
+
+#include <cassert>
+#include <iterator>
+#include <string>
+#include <vector>
+
+namespace Moses {
+namespace GHKM {
+
+class Subgraph;
+
+enum NodeType { SOURCE, TARGET, TREE };
+
+class Node
+{
+ public:
+ Node(const std::string &label, NodeType type)
+ : m_label(label)
+ , m_type(type)
+ , m_children()
+ , m_parents() {}
+
+ ~Node();
+
+ const std::string &GetLabel() const { return m_label; }
+ NodeType GetType() const { return m_type; }
+ const std::vector<Node*> &GetChildren() const { return m_children; }
+ const std::vector<Node*> &GetParents() const { return m_parents; }
+ const Span &GetSpan() const { return m_span; }
+ const Span &GetComplementSpan() const { return m_complementSpan; }
+ const std::vector<const Subgraph*> &GetRules() const { return m_rules; }
+
+ void SetChildren(const std::vector<Node*> &c) { m_children = c; }
+ void SetParents(const std::vector<Node*> &p) { m_parents = p; }
+ void SetSpan(const Span &s) { m_span = s; }
+ void SetComplementSpan(const Span &cs) { m_complementSpan = cs; }
+
+ void AddChild(Node *c) { m_children.push_back(c); }
+ void AddParent(Node *p) { m_parents.push_back(p); }
+ void AddRule(const Subgraph *s) { m_rules.push_back(s); }
+
+ bool IsSink() const { return m_children.empty(); }
+ bool IsPreterminal() const;
+
+ void PropagateIndex(int);
+
+ std::vector<std::string> GetTargetWords() const;
+
+  // Gets the path from this node to the root, starting at the parent unless
+  // includeSelf is true.  This node is required to be part of the original
+  // parse tree (i.e. not a source word, which can have multiple parents).
+ template<typename OutputIterator>
+ void GetTreeAncestors(OutputIterator result, bool includeSelf=false);
+
+ // Returns the lowest common ancestor given a sequence of nodes belonging to
+ // the target tree.
+ template<typename InputIterator>
+ static Node *LowestCommonAncestor(InputIterator first, InputIterator last);
+
+ private:
+ // Disallow copying
+ Node(const Node &);
+ Node &operator=(const Node &);
+
+ void GetTargetWords(std::vector<std::string> &) const;
+
+ std::string m_label;
+ NodeType m_type;
+ std::vector<Node*> m_children;
+ std::vector<Node*> m_parents;
+ Span m_span;
+ Span m_complementSpan;
+ std::vector<const Subgraph*> m_rules;
+};
+
+template<typename OutputIterator>
+void Node::GetTreeAncestors(OutputIterator result, bool includeSelf)
+{
+ // This function assumes the node is part of the parse tree.
+ assert(m_type == TARGET || m_type == TREE);
+
+ if (includeSelf) {
+ *result++ = this;
+ }
+
+ Node *ancestor = !(m_parents.empty()) ? m_parents[0] : 0;
+ while (ancestor != 0) {
+ *result++ = ancestor;
+ ancestor = !(ancestor->m_parents.empty()) ? ancestor->m_parents[0] : 0;
+ }
+}
+
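+// Worked example (hypothetical tree): for nodes x and y with root-bound
+// ancestor paths [x, b, a, root] and [y, c, b, a, root], the longest common
+// suffix is [b, a, root], so b is returned as the lowest common ancestor.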
+template<typename InputIterator>
+Node *Node::LowestCommonAncestor(InputIterator first, InputIterator last)
+{
+ // Check for an empty sequence.
+ if (first == last) {
+ return 0;
+ }
+
+ // Check for the case that the sequence contains only one distinct node.
+ // Also check that every node belongs to the target tree.
+ InputIterator p = first;
+ Node *lca = *p++;
+ for (; p != last; ++p) {
+ Node *node = *p;
+ assert(node->m_type != SOURCE);
+ if (node != lca) {
+ lca = 0;
+ }
+ }
+ if (lca) {
+ return lca;
+ }
+
+ // Now construct an ancestor path for each node, from itself to the root.
+ size_t minPathLength = 0;
+ std::vector<std::vector<Node *> > paths;
+ for (p = first; p != last; ++p) {
+ paths.resize(paths.size()+1);
+ (*p)->GetTreeAncestors(std::back_inserter(paths.back()), true);
+ size_t pathLength = paths.back().size();
+ assert(pathLength > 0);
+ if (paths.size() == 1 || pathLength < minPathLength) {
+ minPathLength = pathLength;
+ }
+ }
+
+ // Search for the start of the longest common suffix by working forward from
+  // the earliest possible starting point to the root.
+ for (size_t i = 0; i < minPathLength; ++i) {
+ bool match = true;
+ for (size_t j = 0; j < paths.size(); ++j) {
+ size_t index = paths[j].size() - minPathLength + i;
+ assert(index >= 0);
+ assert(index < paths[j].size());
+ if (j == 0) {
+ lca = paths[j][index];
+ assert(lca);
+ } else if (lca != paths[j][index]) {
+ match = false;
+ break;
+ }
+ }
+ if (match) {
+ return lca;
+ }
+ }
+
+ // A lowest common ancestor should have been found.
+ assert(false);
+ return 0;
+}
+
+} // namespace GHKM
+} // namespace Moses
+
+#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/Options.h b/scripts/training/phrase-extract/extract-ghkm/Options.h
new file mode 100644
index 000000000..a34a35744
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/Options.h
@@ -0,0 +1,61 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#pragma once
+#ifndef EXTRACT_GHKM_OPTIONS_H_
+#define EXTRACT_GHKM_OPTIONS_H_
+
+#include <string>
+
+namespace Moses {
+namespace GHKM {
+
+struct Options {
+ public:
+ Options()
+ : allowUnary(false)
+ , maxNodes(15)
+ , maxRuleDepth(3)
+ , maxRuleSize(3)
+ , maxScope(3)
+ , minimal(false)
+ , unpairedExtractFormat(false) {}
+
+ // Positional options
+ std::string targetFile;
+ std::string sourceFile;
+ std::string alignmentFile;
+ std::string extractFile;
+
+ // All other options
+ bool allowUnary;
+ std::string glueGrammarFile;
+ int maxNodes;
+ int maxRuleDepth;
+ int maxRuleSize;
+ int maxScope;
+ bool minimal;
+ bool unpairedExtractFormat;
+ std::string unknownWordFile;
+};
+
+} // namespace GHKM
+} // namespace Moses
+
+#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/ParseTree.cpp b/scripts/training/phrase-extract/extract-ghkm/ParseTree.cpp
index 05a5929e4..052b8dee1 100644
--- a/scripts/training/phrase-extract/extract-ghkm/ParseTree.cpp
+++ b/scripts/training/phrase-extract/extract-ghkm/ParseTree.cpp
@@ -1,24 +1,27 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#include "ParseTree.h"
+namespace Moses {
+namespace GHKM {
+
ParseTree::~ParseTree()
{
for (std::vector<ParseTree*>::iterator p(m_children.begin());
@@ -27,26 +30,25 @@ ParseTree::~ParseTree()
}
}
-void
-ParseTree::setChildren(const std::vector<ParseTree*> & children)
+void ParseTree::SetChildren(const std::vector<ParseTree*> &children)
{
m_children = children;
}
-void
-ParseTree::setParent(ParseTree * parent)
+void ParseTree::SetParent(ParseTree *parent)
{
m_parent = parent;
}
-void
-ParseTree::addChild(ParseTree * child)
+void ParseTree::AddChild(ParseTree *child)
{
m_children.push_back(child);
}
-bool
-ParseTree::isLeaf() const
+bool ParseTree::IsLeaf() const
{
return m_children.empty();
}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/ParseTree.h b/scripts/training/phrase-extract/extract-ghkm/ParseTree.h
index bd191aea2..ec6fc147a 100644
--- a/scripts/training/phrase-extract/extract-ghkm/ParseTree.h
+++ b/scripts/training/phrase-extract/extract-ghkm/ParseTree.h
@@ -1,75 +1,82 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef PARSETREE_H_INCLUDED_
-#define PARSETREE_H_INCLUDED_
+#ifndef EXTRACT_GHKM_PARSE_TREE_H_
+#define EXTRACT_GHKM_PARSE_TREE_H_
#include <string>
#include <vector>
+namespace Moses {
+namespace GHKM {
+
class ParseTree
{
-public:
- ParseTree(const std::string & label)
- : m_label(label)
- , m_children()
- , m_parent()
- {}
+ public:
+ ParseTree(const std::string &label)
+ : m_label(label)
+ , m_children()
+ , m_parent() {}
~ParseTree();
- const std::string &
- getLabel() const {
- return m_label;
- }
+ const std::string &GetLabel() const { return m_label; }
+ const std::vector<ParseTree*> &GetChildren() const { return m_children; }
+ const ParseTree *GetParent() const { return m_parent; }
- const std::vector<ParseTree*> &
- getChildren() const {
- return m_children;
- }
-
- const ParseTree *
- getParent() const {
- return m_parent;
- }
+ void SetParent(ParseTree *);
+ void SetChildren(const std::vector<ParseTree*> &);
- void
- setParent(ParseTree *);
+ void AddChild(ParseTree *);
- void
- setChildren(const std::vector<ParseTree*> &);
+ bool IsLeaf() const;
- void
- addChild(ParseTree *);
+ template<typename OutputIterator>
+ void GetLeaves(OutputIterator);
- bool
- isLeaf() const;
+ private:
+ // Disallow copying
+ ParseTree(const ParseTree &);
+ ParseTree &operator=(const ParseTree &);
-private:
std::string m_label;
std::vector<ParseTree*> m_children;
- ParseTree * m_parent;
-
- // Disallow copying
- ParseTree(const ParseTree &);
- ParseTree & operator=(const ParseTree &);
+ ParseTree *m_parent;
};
+template<typename OutputIterator>
+void ParseTree::GetLeaves(OutputIterator result)
+{
+ if (IsLeaf()) {
+ *result++ = this;
+ } else {
+ std::vector<ParseTree *>::const_iterator p = m_children.begin();
+ std::vector<ParseTree *>::const_iterator end = m_children.end();
+ while (p != end) {
+ ParseTree &child = **p++;
+ child.GetLeaves(result);
+ }
+ }
+}
+
+} // namespace GHKM
+} // namespace Moses
+
#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/Rule.h b/scripts/training/phrase-extract/extract-ghkm/Rule.h
deleted file mode 100644
index e641c1044..000000000
--- a/scripts/training/phrase-extract/extract-ghkm/Rule.h
+++ /dev/null
@@ -1,102 +0,0 @@
-/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
-
-#pragma once
-#ifndef RULE_H_INCLUDED_
-#define RULE_H_INCLUDED_
-
-#include "Alignment.h"
-
-#include <string>
-#include <vector>
-
-enum SymbolType { Terminal, NonTerminal };
-
-class Symbol
-{
-public:
- Symbol(const std::string & value, SymbolType type)
- : m_value(value)
- , m_type(type)
- {}
-
- const std::string &
- getValue() const {
- return m_value;
- }
-
- SymbolType
- getType() const {
- return m_type;
- }
-
-private:
- std::string m_value;
- SymbolType m_type;
-};
-
-class Rule
-{
-public:
- Rule(const Symbol & sourceLHS,
- const Symbol & targetLHS,
- const std::vector<Symbol> & sourceRHS,
- const std::vector<Symbol> & targetRHS,
- const Alignment & alignment)
- : m_sourceLHS(sourceLHS)
- , m_targetLHS(targetLHS)
- , m_sourceRHS(sourceRHS)
- , m_targetRHS(targetRHS)
- , m_alignment(alignment)
- {}
-
- const Symbol &
- getSourceLHS() const {
- return m_sourceLHS;
- }
-
- const Symbol &
- getTargetLHS() const {
- return m_targetLHS;
- }
-
- const std::vector<Symbol> &
- getSourceRHS() const {
- return m_sourceRHS;
- }
-
- const std::vector<Symbol> &
- getTargetRHS() const {
- return m_targetRHS;
- }
-
- const Alignment &
- getAlignment() const {
- return m_alignment;
- }
-
-private:
- Symbol m_sourceLHS;
- Symbol m_targetLHS;
- std::vector<Symbol> m_sourceRHS;
- std::vector<Symbol> m_targetRHS;
- Alignment m_alignment;
-};
-
-#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/ScfgRule.cpp b/scripts/training/phrase-extract/extract-ghkm/ScfgRule.cpp
new file mode 100644
index 000000000..648fe6461
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ScfgRule.cpp
@@ -0,0 +1,144 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#include "ScfgRule.h"
+
+#include "Node.h"
+#include "Subgraph.h"
+
+#include <algorithm>
+#include <cassert>
+#include <cstddef>
+#include <map>
+#include <set>
+#include <utility>
+
+namespace Moses {
+namespace GHKM {
+
+ScfgRule::ScfgRule(const Subgraph &fragment)
+ : m_sourceLHS("X", NonTerminal)
+ , m_targetLHS(fragment.GetRoot()->GetLabel(), NonTerminal)
+{
+ // Source RHS
+
+ const std::set<const Node *> &leaves = fragment.GetLeaves();
+
+ std::vector<const Node *> sourceRHSNodes;
+ sourceRHSNodes.reserve(leaves.size());
+ for (std::set<const Node *>::const_iterator p(leaves.begin());
+ p != leaves.end(); ++p) {
+ const Node &leaf = **p;
+ if (!leaf.GetSpan().empty()) {
+ sourceRHSNodes.push_back(&leaf);
+ }
+ }
+
+ std::sort(sourceRHSNodes.begin(), sourceRHSNodes.end(), PartitionOrderComp);
+
+ // Build a mapping from target nodes to source-order indices, so that we
+ // can construct the Alignment object later.
+ std::map<const Node *, std::vector<int> > sourceOrder;
+
+ m_sourceRHS.reserve(sourceRHSNodes.size());
+ int srcIndex = 0;
+ for (std::vector<const Node *>::const_iterator p(sourceRHSNodes.begin());
+ p != sourceRHSNodes.end(); ++p, ++srcIndex) {
+ const Node &sinkNode = **p;
+ if (sinkNode.GetType() == TREE) {
+ m_sourceRHS.push_back(Symbol("X", NonTerminal));
+ sourceOrder[&sinkNode].push_back(srcIndex);
+ } else {
+ assert(sinkNode.GetType() == SOURCE);
+ m_sourceRHS.push_back(Symbol(sinkNode.GetLabel(), Terminal));
+ // Add all aligned target words to the sourceOrder map
+ const std::vector<Node *> &parents(sinkNode.GetParents());
+ for (std::vector<Node *>::const_iterator q(parents.begin());
+ q != parents.end(); ++q) {
+ if ((*q)->GetType() == TARGET) {
+ sourceOrder[*q].push_back(srcIndex);
+ }
+ }
+ }
+ }
+
+ // Target RHS + alignment
+
+ std::vector<const Node *> targetLeaves;
+ fragment.GetTargetLeaves(targetLeaves);
+
+ m_alignment.reserve(targetLeaves.size()); // might be too much but that's OK
+ m_targetRHS.reserve(targetLeaves.size());
+
+ for (std::vector<const Node *>::const_iterator p(targetLeaves.begin());
+ p != targetLeaves.end(); ++p) {
+ const Node &leaf = **p;
+ if (leaf.GetSpan().empty()) {
+ // The node doesn't cover any source words, so we can only add
+ // terminals to the target RHS (not a non-terminal).
+ std::vector<std::string> targetWords(leaf.GetTargetWords());
+ for (std::vector<std::string>::const_iterator q(targetWords.begin());
+ q != targetWords.end(); ++q) {
+ m_targetRHS.push_back(Symbol(*q, Terminal));
+ }
+ } else if (leaf.GetType() == SOURCE) {
+ // Do nothing
+ } else {
+ SymbolType type = (leaf.GetType() == TREE) ? NonTerminal : Terminal;
+ m_targetRHS.push_back(Symbol(leaf.GetLabel(), type));
+
+ int tgtIndex = m_targetRHS.size()-1;
+ std::map<const Node *, std::vector<int> >::iterator q(sourceOrder.find(&leaf));
+ assert(q != sourceOrder.end());
+ std::vector<int> &sourceNodes = q->second;
+ for (std::vector<int>::iterator r(sourceNodes.begin());
+ r != sourceNodes.end(); ++r) {
+ int srcIndex = *r;
+ m_alignment.push_back(std::make_pair(srcIndex, tgtIndex));
+ }
+ }
+ }
+}
+
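+// Computes the rule's scope (Hopkins and Langmead, 2010): one point for a
+// non-terminal at either end of the source RHS plus one for each adjacent
+// pair of non-terminals.  Illustrative example (made-up symbols): the source
+// RHS "[X] saw [X] [X]" has scope 3 (leading [X], the adjacent [X] [X] pair,
+// and the trailing [X]).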
+int ScfgRule::Scope() const
+{
+ int scope = 0;
+ bool predIsNonTerm = false;
+  if (!m_sourceRHS.empty() && m_sourceRHS[0].GetType() == NonTerminal) {
+    ++scope;
+    predIsNonTerm = true;
+  }
+  for (std::size_t i = 1; i < m_sourceRHS.size(); ++i) {
+ bool isNonTerm = m_sourceRHS[i].GetType() == NonTerminal;
+ if (isNonTerm && predIsNonTerm) {
+ ++scope;
+ }
+ predIsNonTerm = isNonTerm;
+ }
+ if (predIsNonTerm) {
+ ++scope;
+ }
+ return scope;
+}
+
+bool ScfgRule::PartitionOrderComp(const Node *a, const Node *b)
+{
+ const Span &aSpan = a->GetSpan();
+ const Span &bSpan = b->GetSpan();
+ assert(!aSpan.empty() && !bSpan.empty());
+ return *(aSpan.begin()) < *(bSpan.begin());
+}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/ScfgRule.h b/scripts/training/phrase-extract/extract-ghkm/ScfgRule.h
new file mode 100644
index 000000000..1ed534d9e
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ScfgRule.h
@@ -0,0 +1,76 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#pragma once
+#ifndef EXTRACT_GHKM_SCFG_RULE_H_
+#define EXTRACT_GHKM_SCFG_RULE_H_
+
+#include "Alignment.h"
+
+#include <string>
+#include <vector>
+
+namespace Moses {
+namespace GHKM {
+
+class Node;
+class Subgraph;
+
+enum SymbolType { Terminal, NonTerminal };
+
+struct Symbol
+{
+ public:
+ Symbol(const std::string &v, SymbolType t) : m_value(v) , m_type(t) {}
+
+ const std::string &GetValue() const { return m_value; }
+ SymbolType GetType() const { return m_type; }
+
+ private:
+ std::string m_value;
+ SymbolType m_type;
+};
+
+class ScfgRule
+{
+ public:
+ ScfgRule(const Subgraph &fragment);
+
+ const Symbol &GetSourceLHS() const { return m_sourceLHS; }
+ const Symbol &GetTargetLHS() const { return m_targetLHS; }
+ const std::vector<Symbol> &GetSourceRHS() const { return m_sourceRHS; }
+ const std::vector<Symbol> &GetTargetRHS() const { return m_targetRHS; }
+ const Alignment &GetAlignment() const { return m_alignment; }
+
+ int Scope() const;
+
+ private:
+ static bool PartitionOrderComp(const Node *, const Node *);
+
+ Symbol m_sourceLHS;
+ Symbol m_targetLHS;
+ std::vector<Symbol> m_sourceRHS;
+ std::vector<Symbol> m_targetRHS;
+ Alignment m_alignment;
+};
+
+} // namespace GHKM
+} // namespace Moses
+
+#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.cpp b/scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.cpp
new file mode 100644
index 000000000..4be3f048d
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.cpp
@@ -0,0 +1,153 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#include "ScfgRuleWriter.h"
+
+#include "Alignment.h"
+#include "Options.h"
+#include "ScfgRule.h"
+
+#include <cassert>
+#include <ostream>
+#include <map>
+#include <sstream>
+#include <vector>
+
+namespace Moses {
+namespace GHKM {
+
+void ScfgRuleWriter::Write(const ScfgRule &rule)
+{
+ if (m_options.unpairedExtractFormat) {
+ WriteUnpairedFormat(rule);
+ } else {
+ WriteStandardFormat(rule);
+ }
+}
+
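+// Writes the rule in the standard extract format, in which each non-terminal
+// is emitted as a paired source/target label.  For instance (made-up
+// symbols), a source non-terminal X linked to a target non-terminal NP
+// appears as "[X][NP]" on both sides of the rule, followed by the index
+// alignments and a count field of 1.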
+void ScfgRuleWriter::WriteStandardFormat(const ScfgRule &rule)
+{
+ const std::vector<Symbol> &sourceRHS = rule.GetSourceRHS();
+ const std::vector<Symbol> &targetRHS = rule.GetTargetRHS();
+
+ std::map<int, int> sourceToTargetNTMap;
+ std::map<int, int> targetToSourceNTMap;
+
+ const Alignment &alignment = rule.GetAlignment();
+
+ for (Alignment::const_iterator p(alignment.begin());
+ p != alignment.end(); ++p) {
+ if (sourceRHS[p->first].GetType() == NonTerminal) {
+ assert(targetRHS[p->second].GetType() == NonTerminal);
+ sourceToTargetNTMap[p->first] = p->second;
+ targetToSourceNTMap[p->second] = p->first;
+ }
+ }
+
+ std::ostringstream sourceSS;
+ std::ostringstream targetSS;
+
+ // Write the source side of the rule to sourceSS.
+ int i = 0;
+ for (std::vector<Symbol>::const_iterator p(sourceRHS.begin());
+ p != sourceRHS.end(); ++p, ++i) {
+ WriteSymbol(*p, sourceSS);
+ if (p->GetType() == NonTerminal) {
+ int targetIndex = sourceToTargetNTMap[i];
+ WriteSymbol(targetRHS[targetIndex], sourceSS);
+ }
+ sourceSS << " ";
+ }
+ WriteSymbol(rule.GetSourceLHS(), sourceSS);
+
+ // Write the target side of the rule to targetSS.
+ i = 0;
+ for (std::vector<Symbol>::const_iterator p(targetRHS.begin());
+ p != targetRHS.end(); ++p, ++i) {
+ if (p->GetType() == NonTerminal) {
+ int sourceIndex = targetToSourceNTMap[i];
+ WriteSymbol(sourceRHS[sourceIndex], targetSS);
+ }
+ WriteSymbol(*p, targetSS);
+ targetSS << " ";
+ }
+ WriteSymbol(rule.GetTargetLHS(), targetSS);
+
+ // Write the rule to the forward and inverse extract files.
+ m_fwd << sourceSS.str() << " ||| " << targetSS.str() << " |||";
+ m_inv << targetSS.str() << " ||| " << sourceSS.str() << " |||";
+ for (Alignment::const_iterator p(alignment.begin());
+ p != alignment.end(); ++p) {
+ m_fwd << " " << p->first << "-" << p->second;
+ m_inv << " " << p->second << "-" << p->first;
+ }
+ m_fwd << " ||| 1" << std::endl;
+ m_inv << " ||| 1" << std::endl;
+}
+
+void ScfgRuleWriter::WriteUnpairedFormat(const ScfgRule &rule)
+{
+ const std::vector<Symbol> &sourceRHS = rule.GetSourceRHS();
+ const std::vector<Symbol> &targetRHS = rule.GetTargetRHS();
+ const Alignment &alignment = rule.GetAlignment();
+
+ std::ostringstream sourceSS;
+ std::ostringstream targetSS;
+
+ // Write the source side of the rule to sourceSS.
+ int i = 0;
+ for (std::vector<Symbol>::const_iterator p(sourceRHS.begin());
+ p != sourceRHS.end(); ++p, ++i) {
+ WriteSymbol(*p, sourceSS);
+ sourceSS << " ";
+ }
+ WriteSymbol(rule.GetSourceLHS(), sourceSS);
+
+ // Write the target side of the rule to targetSS.
+ i = 0;
+ for (std::vector<Symbol>::const_iterator p(targetRHS.begin());
+ p != targetRHS.end(); ++p, ++i) {
+ WriteSymbol(*p, targetSS);
+ targetSS << " ";
+ }
+ WriteSymbol(rule.GetTargetLHS(), targetSS);
+
+ // Write the rule to the forward and inverse extract files.
+ m_fwd << sourceSS.str() << " ||| " << targetSS.str() << " |||";
+ m_inv << targetSS.str() << " ||| " << sourceSS.str() << " |||";
+ for (Alignment::const_iterator p(alignment.begin());
+ p != alignment.end(); ++p) {
+ m_fwd << " " << p->first << "-" << p->second;
+ m_inv << " " << p->second << "-" << p->first;
+ }
+ m_fwd << " ||| 1" << std::endl;
+ m_inv << " ||| 1" << std::endl;
+}
+
+void ScfgRuleWriter::WriteSymbol(const Symbol &symbol, std::ostream &out)
+{
+ if (symbol.GetType() == NonTerminal) {
+ out << "[" << symbol.GetValue() << "]";
+ } else {
+ out << symbol.GetValue();
+ }
+}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.h b/scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.h
new file mode 100644
index 000000000..edea1e95c
--- /dev/null
+++ b/scripts/training/phrase-extract/extract-ghkm/ScfgRuleWriter.h
@@ -0,0 +1,60 @@
+/***********************************************************************
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
+
+#pragma once
+#ifndef EXTRACT_GHKM_SCFG_RULE_WRITER_H_
+#define EXTRACT_GHKM_SCFG_RULE_WRITER_H_
+
+#include <ostream>
+
+namespace Moses {
+namespace GHKM {
+
+class Options;
+class ScfgRule;
+class Symbol;
+
+class ScfgRuleWriter
+{
+ public:
+ ScfgRuleWriter(std::ostream &fwd, std::ostream &inv, const Options &options)
+ : m_fwd(fwd)
+ , m_inv(inv)
+ , m_options(options) {}
+
+ void Write(const ScfgRule &);
+
+ private:
+ // Disallow copying
+ ScfgRuleWriter(const ScfgRuleWriter &);
+ ScfgRuleWriter &operator=(const ScfgRuleWriter &);
+
+ void WriteStandardFormat(const ScfgRule &);
+ void WriteUnpairedFormat(const ScfgRule &);
+ void WriteSymbol(const Symbol &, std::ostream &);
+
+ std::ostream &m_fwd;
+ std::ostream &m_inv;
+ const Options &m_options;
+};
+
+} // namespace GHKM
+} // namespace Moses
+
+#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/Span.cpp b/scripts/training/phrase-extract/extract-ghkm/Span.cpp
index 56b224ee7..f0eccbdf2 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Span.cpp
+++ b/scripts/training/phrase-extract/extract-ghkm/Span.cpp
@@ -1,58 +1,46 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#include "Span.h"
-bool
-spansIntersect(const Span & a, const Span & b)
+namespace Moses {
+namespace GHKM {
+
+bool SpansIntersect(const Span &a, const ContiguousSpan &b)
{
- for (Span::const_iterator p(a.begin()); p != a.end(); ++p) {
- Span::const_iterator q = b.find(*p);
- if (q != b.end()) {
+ for (Span::const_iterator p = a.begin(); p != a.end(); ++p) {
+ if (*p >= b.first && *p <= b.second) {
return true;
}
}
return false;
}
-Span
-closure(const Span & s)
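+// Returns the smallest contiguous span covering s, or (-1, -1) if s is empty.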
+ContiguousSpan Closure(const Span &s)
{
- Span result;
- if (s.empty()) {
- return result;
- }
- Span::const_iterator p(s.begin());
- int min = *p;
- int max = *p;
- ++p;
- for (; p != s.end(); ++p) {
- if (*p < min) {
- min = *p;
- }
- if (*p > max) {
- max = *p;
- }
- }
- for (int i = min; i <= max; ++i) {
- result.insert(i);
+ ContiguousSpan result(-1,-1);
+ if (!s.empty()) {
+ result.first = *(s.begin());
+ result.second = *(s.rbegin());
}
-
return result;
}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/Span.h b/scripts/training/phrase-extract/extract-ghkm/Span.h
index a0c14877d..003d1ef84 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Span.h
+++ b/scripts/training/phrase-extract/extract-ghkm/Span.h
@@ -1,35 +1,40 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef SPAN_H_INCLUDED_
-#define SPAN_H_INCLUDED_
+#ifndef EXTRACT_GHKM_SPAN_H_
+#define EXTRACT_GHKM_SPAN_H_
#include <map>
#include <set>
+namespace Moses {
+namespace GHKM {
+
typedef std::set<int> Span;
+typedef std::pair<int, int> ContiguousSpan;
+
+bool SpansIntersect(const Span &, const ContiguousSpan &);
-bool
-spansIntersect(const Span & a, const Span & b);
+ContiguousSpan Closure(const Span &);
-Span
-closure(const Span & s);
+} // namespace GHKM
+} // namespace Moses
#endif
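
The reworked Span API above replaces the old set-versus-set intersection test with a set-versus-interval test: Closure now returns the (min, max) pair of a span, or (-1, -1) for an empty span, and SpansIntersect reports whether any element of a Span falls inside that closed interval. A minimal usage sketch, assuming Span.h/Span.cpp from this patch are on the build path:

    #include "Span.h"
    #include <cassert>

    int main() {
      using namespace Moses::GHKM;
      Span s;
      s.insert(2); s.insert(5); s.insert(3);
      ContiguousSpan c = Closure(s);          // (2, 5): smallest and largest element
      assert(c.first == 2 && c.second == 5);
      Span t;
      t.insert(4);
      assert(SpansIntersect(t, c));           // 4 lies inside [2, 5]
      t.clear();
      t.insert(7);
      assert(!SpansIntersect(t, c));          // 7 does not
      assert(Closure(Span()).first == -1);    // empty span closes to (-1, -1)
      return 0;
    }
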
diff --git a/scripts/training/phrase-extract/extract-ghkm/Subgraph.cpp b/scripts/training/phrase-extract/extract-ghkm/Subgraph.cpp
index 44033aaa7..e5aedbb16 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Subgraph.cpp
+++ b/scripts/training/phrase-extract/extract-ghkm/Subgraph.cpp
@@ -1,133 +1,105 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#include "Subgraph.h"
-#include "AlignmentGraph.h"
+#include "Node.h"
-#include <cassert>
-
-Subgraph::Subgraph(Node * root)
- : m_root(root)
-{
- if (root->isSink()) {
- m_expandedNodes.insert(root);
- } else {
- m_expandableNodes.push(root);
- }
-}
+namespace Moses {
+namespace GHKM {
-bool
-Subgraph::isFragment() const
+void Subgraph::GetTargetLeaves(std::vector<const Node *> &result) const
{
- return !isTrivial();
+ result.clear();
+ GetTargetLeaves(m_root, result);
}
-bool
-Subgraph::canFormSCFGRule() const
+void Subgraph::GetTargetLeaves(const Node *root,
+ std::vector<const Node *> &result) const
{
- return isFragment()
- && (m_root->getType() == TREE)
- && !(m_root->getSpan().empty());
-}
-
-bool
-Subgraph::isTrivial() const
-{
- std::set<Node *> sinkNodes = getSinkNodes();
- return (sinkNodes.size() == 1) &&
- (sinkNodes.find(m_root) != sinkNodes.end());
-}
-
-bool
-Subgraph::isSinkNode(Node * n) const
-{
- assert(m_expandableNodes.empty());
- return m_expandedNodes.find(n) != m_expandedNodes.end();
-}
-
-std::set<Node *>
-Subgraph::getSinkNodes() const
-{
- std::set<Node *> sinkNodes;
- std::stack<Node *> expandable(m_expandableNodes);
- while (!expandable.empty()) {
- sinkNodes.insert(expandable.top());
- expandable.pop();
+ if (root->GetType() == TARGET || m_leaves.find(root) != m_leaves.end()) {
+ result.push_back(root);
+ } else {
+ const std::vector<Node*> &children = root->GetChildren();
+ for (std::vector<Node *>::const_iterator p(children.begin());
+ p != children.end(); ++p) {
+ GetTargetLeaves(*p, result);
+ }
}
- sinkNodes.insert(m_expandedNodes.begin(), m_expandedNodes.end());
- return sinkNodes;
}
-// Expand a single subgraph node. Return true if subgraph is fully expanded
-// or false otherwise.
-bool
-Subgraph::expand(const std::set<Node *> & frontierSet)
+int Subgraph::CountNodes(const Node *n) const
{
- if (m_expandableNodes.empty()) {
- return true;
+ if (n->GetType() != TREE) {
+ return 0;
}
-
- Node * n = m_expandableNodes.top();
- m_expandableNodes.pop();
-
- const std::vector<Node *> & children = n->getChildren();
- for (std::vector<Node *>::const_iterator p(children.begin());
+ if (IsTrivial()) {
+ return 1;
+ }
+ int count = 1;
+ const std::vector<Node*> &children = n->GetChildren();
+ for (std::vector<Node *>::const_iterator p = children.begin();
p != children.end(); ++p) {
- Node * child = *p;
- if (child->isSink()) {
- m_expandedNodes.insert(child);
- continue;
- }
- std::set<Node *>::const_iterator q = frontierSet.find(child);
- if (q == frontierSet.end()) { //child is not from the frontier set
- m_expandableNodes.push(child);
- } else if (child->getType() == TARGET) { // still need source word
- m_expandableNodes.push(child);
- } else {
- m_expandedNodes.insert(child);
+ const Node *child = *p;
+ if (m_leaves.find(child) == m_leaves.end()) {
+ count += CountNodes(child);
+ } else if (child->GetType() == TREE) {
+ ++count;
}
}
-
- return m_expandableNodes.empty();
+ return count;
}
-std::vector<Node *>
-Subgraph::getLeafNodes() const
+int Subgraph::CalcSize(const Node *n) const
{
- std::vector<Node *> leafNodes;
- std::set<Node *> sinkNodes(getSinkNodes());
- getLeafNodes(m_root, leafNodes, sinkNodes);
- return leafNodes;
+ if (n->GetType() != TREE || n->IsPreterminal()) {
+ return 0;
+ }
+ if (IsTrivial()) {
+ return 1;
+ }
+ int count = 1;
+ const std::vector<Node*> &children = n->GetChildren();
+ for (std::vector<Node *>::const_iterator p = children.begin();
+ p != children.end(); ++p) {
+ if (m_leaves.find(*p) == m_leaves.end()) {
+ count += CalcSize(*p);
+ }
+ }
+ return count;
}
-void
-Subgraph::getLeafNodes(Node * root, std::vector<Node *> & leafNodes,
- const std::set<Node *> & sinkNodes) const
+int Subgraph::CalcDepth(const Node *n) const
{
- if (root->getType() == TARGET || sinkNodes.find(root) != sinkNodes.end()) {
- leafNodes.push_back(root);
- } else {
- const std::vector<Node*> & children(root->getChildren());
- for (std::vector<Node *>::const_iterator p(children.begin());
- p != children.end(); ++p) {
- getLeafNodes(*p, leafNodes, sinkNodes);
+ if (n->GetType() != TREE || n->IsPreterminal() || m_leaves.empty()) {
+ return 0;
+ }
+ int maxChildDepth = 0;
+ const std::vector<Node*> &children = n->GetChildren();
+ for (std::vector<Node *>::const_iterator p = children.begin();
+ p != children.end(); ++p) {
+ if (m_leaves.find(*p) == m_leaves.end()) {
+ maxChildDepth = std::max(maxChildDepth, CalcDepth(*p));
}
}
+ return maxChildDepth + 1;
}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/Subgraph.h b/scripts/training/phrase-extract/extract-ghkm/Subgraph.h
index 9b772d73a..e84903502 100644
--- a/scripts/training/phrase-extract/extract-ghkm/Subgraph.h
+++ b/scripts/training/phrase-extract/extract-ghkm/Subgraph.h
@@ -1,71 +1,81 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef SUBGRAPH_H_INCLUDED_
-#define SUBGRAPH_H_INCLUDED_
+#ifndef EXTRACT_GHKM_SUBGRAPH_H_
+#define EXTRACT_GHKM_SUBGRAPH_H_
-class Node;
+#include "Node.h"
#include <set>
-#include <stack>
#include <vector>
-class Subgraph
-{
-public:
- Subgraph(Node * root);
-
- bool
- isTrivial() const;
-
- bool
- isFragment() const;
-
- bool
- canFormSCFGRule() const;
+namespace Moses {
+namespace GHKM {
- bool
- isSinkNode(Node *) const;
-
- bool
- expand(const std::set<Node *> & frontierSet);
-
- const Node *
- getRoot() const {
- return m_root;
- };
-
- std::set<Node *>
- getSinkNodes() const;
-
- std::vector<Node *>
- getLeafNodes() const;
-
-private:
- Node * m_root;
- std::stack<Node *> m_expandableNodes;
- std::set<Node *> m_expandedNodes;
+class Node;
- void
- getLeafNodes(Node * root, std::vector<Node *> & leafNodes,
- const std::set<Node *> & sinkNodes) const;
+class Subgraph
+{
+ public:
+ Subgraph(const Node *root)
+ : m_root(root)
+ , m_depth(0)
+ , m_size(root->GetType() == TREE ? 1 : 0)
+ , m_nodeCount(1) {}
+
+ Subgraph(const Node *root, const std::set<const Node *> &leaves)
+ : m_root(root)
+ , m_leaves(leaves)
+ , m_depth(-1)
+ , m_size(-1)
+ , m_nodeCount(-1)
+ {
+ m_depth = CalcDepth(m_root);
+ m_size = CalcSize(m_root);
+ m_nodeCount = CountNodes(m_root);
+ }
+
+ const Node *GetRoot() const { return m_root; }
+ const std::set<const Node *> &GetLeaves() const { return m_leaves; }
+ int GetDepth() const { return m_depth; }
+ int GetSize() const { return m_size; }
+ int GetNodeCount() const { return m_nodeCount; }
+
+ bool IsTrivial() const { return m_leaves.empty(); }
+
+ void GetTargetLeaves(std::vector<const Node *> &) const;
+
+ private:
+ void GetTargetLeaves(const Node *, std::vector<const Node *> &) const;
+ int CalcDepth(const Node *) const;
+ int CalcSize(const Node *) const;
+ int CountNodes(const Node *) const;
+
+ const Node *m_root;
+ std::set<const Node *> m_leaves;
+ int m_depth;
+ int m_size;
+ int m_nodeCount;
};
+} // namespace GHKM
+} // namespace Moses
+
#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.cpp b/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.cpp
index 74e0c6e43..ab4616918 100644
--- a/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.cpp
+++ b/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.cpp
@@ -1,21 +1,21 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#include "XmlTreeParser.h"
@@ -25,11 +25,12 @@
#include <cassert>
#include <vector>
-namespace
-{
-std::auto_ptr<ParseTree>
-parseXmlTree(std::vector<std::string>::const_iterator & p,
- const std::vector<std::string>::const_iterator & end)
+namespace Moses {
+namespace GHKM {
+
+std::auto_ptr<ParseTree> ParseXmlTree(
+ std::vector<std::string>::const_iterator &p,
+ const std::vector<std::string>::const_iterator &end)
{
std::auto_ptr<ParseTree> t;
@@ -45,11 +46,11 @@ parseXmlTree(std::vector<std::string>::const_iterator & p,
if (!isXmlTag(s)) {
p++;
- t.reset(new ParseTree(s));
+ t.reset(new ParseTree(unescape(s)));
return t;
}
- const std::string & tag = s;
+ const std::string &tag = s;
if (tag[1] == '/') {
// Closing tag. Don't advance p -- let caller handle it.
@@ -66,21 +67,22 @@ parseXmlTree(std::vector<std::string>::const_iterator & p,
}
p++;
- while (ParseTree * c = parseXmlTree(p, end).release()) {
- t->addChild(c);
- c->setParent(t.get());
+ while (ParseTree *c = ParseXmlTree(p, end).release()) {
+ t->AddChild(c);
+ c->SetParent(t.get());
}
p++; // Skip over closing tag
return t;
}
-}
-std::auto_ptr<ParseTree>
-parseXmlTree(const std::string & line)
+std::auto_ptr<ParseTree> ParseXmlTree(const std::string &line)
{
std::vector<std::string> xmlTokens(TokenizeXml(line));
std::vector<std::string>::const_iterator begin(xmlTokens.begin());
std::vector<std::string>::const_iterator end(xmlTokens.end());
- return parseXmlTree(begin, end);
+ return ParseXmlTree(begin, end);
}
+
+} // namespace GHKM
+} // namespace Moses
diff --git a/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.h b/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.h
index 11c4d4a33..16f4c3f8e 100644
--- a/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.h
+++ b/scripts/training/phrase-extract/extract-ghkm/XmlTreeParser.h
@@ -1,32 +1,37 @@
/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
+ Moses - statistical machine translation system
+ Copyright (C) 2006-2011 University of Edinburgh
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+***********************************************************************/
#pragma once
-#ifndef XMLTREEPARSER_H_INCLUDED_
-#define XMLTREEPARSER_H_INCLUDED_
+#ifndef EXTRACT_GHKM_XML_TREE_PARSER_H_
+#define EXTRACT_GHKM_XML_TREE_PARSER_H_
#include <memory>
#include <string>
+namespace Moses {
+namespace GHKM {
+
class ParseTree;
-std::auto_ptr<ParseTree>
-parseXmlTree(const std::string &);
+std::auto_ptr<ParseTree> ParseXmlTree(const std::string &);
+
+} // namespace GHKM
+} // namespace Moses
#endif
diff --git a/scripts/training/phrase-extract/extract-ghkm/extract-ghkm.cpp b/scripts/training/phrase-extract/extract-ghkm/extract-ghkm.cpp
deleted file mode 100644
index 453e810ea..000000000
--- a/scripts/training/phrase-extract/extract-ghkm/extract-ghkm.cpp
+++ /dev/null
@@ -1,263 +0,0 @@
-/***********************************************************************
- Moses - factored phrase-based language decoder
- Copyright (C) 2010 University of Edinburgh
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ***********************************************************************/
-
-////////////////////////////////////////////////////////////////////////////////
-//
-// extract-ghkm
-// SCFG grammar rule extractor based on the GHKM algorithm described in:
-//
-// Galley, M., Hopkins, M., Knight, K., and Marcu, D. (2004)
-// "What's in a Translation Rule?", In Proceedings of HLT/NAACL 2004.
-//
-////////////////////////////////////////////////////////////////////////////////
-
-#include "Alignment.h"
-#include "AlignmentGraph.h"
-#include "Exception.h"
-#include "ParseTree.h"
-#include "Rule.h"
-#include "Span.h"
-#include "XmlTreeParser.h"
-
-#include <cassert>
-#include <cstdlib>
-#include <fstream>
-#include <iostream>
-#include <string>
-#include <sstream>
-#include <vector>
-
-namespace
-{
-const std::string progName = "extract-ghkm";
-
-void
-printUsage()
-{
- std::cerr << "Usage: "
- << progName << " TARGET SOURCE ALIGNMENT EXTRACT"
- << std::endl;
-}
-
-void
-printErrorMsg(const std::string & errorMsg)
-{
- std::cerr << progName << ": " << errorMsg << std::endl;
-}
-
-void
-printSymbol(const Symbol & symbol, std::ostream & out)
-{
- if (symbol.getType() == NonTerminal) {
- out << "[" << symbol.getValue() << "]";
- } else {
- out << symbol.getValue();
- }
-}
-
-void
-printRule(const Rule & rule, std::ostream & out, std::ostream & invOut)
-{
- const std::vector<Symbol> & sourceRHS = rule.getSourceRHS();
- const std::vector<Symbol> & targetRHS = rule.getTargetRHS();
-
- // TODO Just create maps for NTs (one-to-one)
- std::map<int, std::vector<int> > sourceToTarget;
- std::map<int, std::vector<int> > targetToSource;
-
- const Alignment & alignment = rule.getAlignment();
-
- for (Alignment::const_iterator p(alignment.begin());
- p != alignment.end(); ++p) {
- sourceToTarget[p->first].push_back(p->second);
- targetToSource[p->second].push_back(p->first);
- }
-
- std::ostringstream sourceSS;
- std::ostringstream targetSS;
-
- int i = 0;
- for (std::vector<Symbol>::const_iterator p(sourceRHS.begin());
- p != sourceRHS.end(); ++p, ++i) {
- printSymbol(*p, sourceSS);
- if (p->getType() == NonTerminal) {
- assert(sourceToTarget.find(i) != sourceToTarget.end());
- const std::vector<int> & targetIndices = sourceToTarget[i];
- assert(targetIndices.size() == 1);
- int targetIndex = targetIndices[0];
- printSymbol(targetRHS[targetIndex], sourceSS);
- }
- sourceSS << " ";
- }
- printSymbol(rule.getSourceLHS(), sourceSS);
-
- i = 0;
- for (std::vector<Symbol>::const_iterator p(targetRHS.begin());
- p != targetRHS.end(); ++p, ++i) {
- if (p->getType() == NonTerminal) {
- assert(targetToSource.find(i) != targetToSource.end());
- const std::vector<int> & sourceIndices = targetToSource[i];
- assert(sourceIndices.size() == 1);
- int sourceIndex = sourceIndices[0];
- printSymbol(sourceRHS[sourceIndex], targetSS);
- }
- printSymbol(*p, targetSS);
- targetSS << " ";
- }
-
- printSymbol(rule.getTargetLHS(), targetSS);
-
- out << sourceSS.str() << " ||| " << targetSS.str() << " |||";
- invOut << targetSS.str() << " ||| " << sourceSS.str() << " |||";
-
- for (Alignment::const_iterator p(alignment.begin());
- p != alignment.end(); ++p) {
- out << " " << p->first << "-" << p->second;
- invOut << " " << p->second << "-" << p->first;
- }
-
- out << " ||| 1" << std::endl;
- invOut << " ||| 1" << std::endl;
-}
-
-std::vector<std::string>
-readTokens(const std::string & s)
-{
- std::vector<std::string> tokens;
-
- std::string whitespace = " \t";
-
- std::string::size_type begin = s.find_first_not_of(whitespace);
- assert(begin != std::string::npos);
- while (true) {
- std::string::size_type end = s.find_first_of(whitespace, begin);
- std::string token;
- if (end == std::string::npos) {
- token = s.substr(begin);
- } else {
- token = s.substr(begin, end-begin);
- }
- tokens.push_back(token);
- if (end == std::string::npos) {
- break;
- }
- begin = s.find_first_not_of(whitespace, end);
- if (begin == std::string::npos) {
- break;
- }
- }
-
- return tokens;
-}
-}
-
-int
-main(int argc, char * argv[])
-{
- if (argc != 5) {
- printUsage();
- exit(1);
- }
-
- std::ifstream targetStream(argv[1]);
- if (!targetStream) {
- printErrorMsg("Failed to open file: " + std::string(argv[1]));
- exit(1);
- }
-
- std::ifstream sourceStream(argv[2]);
- if (!sourceStream) {
- printErrorMsg("Failed to open file: " + std::string(argv[2]));
- exit(1);
- }
-
- std::ifstream alignmentStream(argv[3]);
- if (!alignmentStream) {
- printErrorMsg("Failed to open file: " + std::string(argv[3]));
- exit(1);
- }
-
- std::ofstream extractStream(argv[4]);
- if (!extractStream) {
- printErrorMsg("Failed to open file: " + std::string(argv[4]));
- exit(1);
- }
-
- std::string invExtractFileName = std::string(argv[4]) + std::string(".inv");
- std::ofstream invExtractStream(invExtractFileName.c_str());
- if (!invExtractStream) {
- printErrorMsg("Failed to open file: " + invExtractFileName);
- exit(1);
- }
-
- size_t lineNum = 0;
- while (true) {
- std::string targetLine;
- std::getline(targetStream, targetLine);
-
- std::string sourceLine;
- std::getline(sourceStream, sourceLine);
-
- std::string alignmentLine;
- std::getline(alignmentStream, alignmentLine);
-
- if (targetStream.eof() && sourceStream.eof() && alignmentStream.eof()) {
- break;
- }
-
- if (targetStream.eof() || sourceStream.eof() || alignmentStream.eof()) {
- printErrorMsg("Files must contain same number of lines");
- exit(1);
- }
-
- ++lineNum;
-
- std::auto_ptr<ParseTree> t(parseXmlTree(targetLine));
- if (!t.get()) {
- std::ostringstream s;
- s << "Failed to parse XML tree at line " << lineNum;
- printErrorMsg(s.str());
- exit(1);
- }
-
- std::vector<std::string> sourceTokens(readTokens(sourceLine));
-
- Alignment alignment;
- try {
- alignment = readAlignment(alignmentLine);
- } catch (const Exception & e) {
- std::ostringstream s;
- s << "Failed to read alignment at line " << lineNum << ": ";
- s << e.getMsg();
- printErrorMsg(s.str());
- exit(1);
- }
-
- AlignmentGraph graph(t.get(), sourceTokens, alignment);
-
- std::vector<Rule> rules(graph.inferRules());
-
- for (std::vector<Rule>::iterator p(rules.begin());
- p != rules.end(); ++p) {
- printRule(*p, extractStream, invExtractStream);
- }
- }
-
- return 0;
-}
diff --git a/scripts/training/symal/Jamfile b/scripts/training/symal/Jamfile
new file mode 100644
index 000000000..899046bd8
--- /dev/null
+++ b/scripts/training/symal/Jamfile
@@ -0,0 +1,3 @@
+exe symal : symal.cpp cmd.c ;
+
+install dist : symal : <location>. ;
diff --git a/scripts/training/symal/Makefile b/scripts/training/symal/Makefile
deleted file mode 100644
index 49381b2d6..000000000
--- a/scripts/training/symal/Makefile
+++ /dev/null
@@ -1,11 +0,0 @@
-
-all: symal
-
-clean:
- rm -f *.o
-
-cmd.o: cmd.c cmd.h
- $(CC) -O3 -c -o cmd.o cmd.c
-
-symal: symal.cpp cmd.o
- $(CXX) -O3 -o $@ $(@).cpp cmd.o
diff --git a/scripts/training/train-model.perl b/scripts/training/train-model.perl.missing_bin_dir
index 2c7a2b706..d6fa95b34 100755
--- a/scripts/training/train-model.perl
+++ b/scripts/training/train-model.perl.missing_bin_dir
@@ -30,7 +30,7 @@ my($_ROOT_DIR, $_CORPUS_DIR, $_GIZA_E2F, $_GIZA_F2E, $_MODEL_DIR, $_TEMP_DIR, $_
$_DECODING_STEPS, $_PARALLEL, $_FACTOR_DELIMITER, @_PHRASE_TABLE,
@_REORDERING_TABLE, @_GENERATION_TABLE, @_GENERATION_TYPE, $_GENERATION_CORPUS,
$_DONT_ZIP, $_MGIZA, $_MGIZA_CPUS, $_HMM_ALIGN, $_CONFIG,
- $_HIERARCHICAL,$_XML,$_SOURCE_SYNTAX,$_TARGET_SYNTAX,$_GLUE_GRAMMAR,$_GLUE_GRAMMAR_FILE,$_UNKNOWN_WORD_LABEL_FILE,$_EXTRACT_OPTIONS,$_SCORE_OPTIONS,
+ $_HIERARCHICAL,$_XML,$_SOURCE_SYNTAX,$_TARGET_SYNTAX,$_GLUE_GRAMMAR,$_GLUE_GRAMMAR_FILE,$_UNKNOWN_WORD_LABEL_FILE,$_GHKM,$_EXTRACT_OPTIONS,$_SCORE_OPTIONS,
$_PHRASE_WORD_ALIGNMENT,$_FORCE_FACTORED_FILENAMES,
$_MEMSCORE, $_FINAL_ALIGNMENT_MODEL,
$_CONTINUE,$_MAX_LEXICAL_REORDERING,$_DO_STEPS,
@@ -99,6 +99,7 @@ $_HELP = 1
'glue-grammar' => \$_GLUE_GRAMMAR,
'glue-grammar-file=s' => \$_GLUE_GRAMMAR_FILE,
'unknown-word-label-file=s' => \$_UNKNOWN_WORD_LABEL_FILE,
+ 'ghkm' => \$_GHKM,
'extract-options=s' => \$_EXTRACT_OPTIONS,
'score-options=s' => \$_SCORE_OPTIONS,
'source-syntax' => \$_SOURCE_SYNTAX,
@@ -197,7 +198,13 @@ my $MKCLS = "$BINDIR/mkcls";
# supporting scripts/binaries from this package
my $PHRASE_EXTRACT = "$SCRIPTS_ROOTDIR/training/phrase-extract/extract";
-my $RULE_EXTRACT = "$SCRIPTS_ROOTDIR/training/phrase-extract/extract-rules";
+my $RULE_EXTRACT;
+if (defined($_GHKM)) {
+ $RULE_EXTRACT = "$SCRIPTS_ROOTDIR/training/phrase-extract/extract-ghkm/tools/extract-ghkm";
+}
+else {
+ $RULE_EXTRACT = "$SCRIPTS_ROOTDIR/training/phrase-extract/extract-rules";
+}
my $LEXICAL_REO_SCORER = "$SCRIPTS_ROOTDIR/training/lexical-reordering/score";
my $MEMSCORE = "$SCRIPTS_ROOTDIR/training/memscore/memscore";
my $EPPEX = "$SCRIPTS_ROOTDIR/training/eppex/eppex";
@@ -1305,8 +1312,10 @@ sub extract_phrase {
$cmd = "$RULE_EXTRACT $alignment_file_e $alignment_file_f $alignment_file_a $extract_file";
$cmd .= " --GlueGrammar $___GLUE_GRAMMAR_FILE" if $_GLUE_GRAMMAR;
$cmd .= " --UnknownWordLabel $_UNKNOWN_WORD_LABEL_FILE" if $_TARGET_SYNTAX && defined($_UNKNOWN_WORD_LABEL_FILE);
- $cmd .= " --SourceSyntax" if $_SOURCE_SYNTAX;
- $cmd .= " --TargetSyntax" if $_TARGET_SYNTAX;
+ if (!defined($_GHKM)) {
+ $cmd .= " --SourceSyntax" if $_SOURCE_SYNTAX;
+ $cmd .= " --TargetSyntax" if $_TARGET_SYNTAX;
+ }
$cmd .= " ".$_EXTRACT_OPTIONS if defined($_EXTRACT_OPTIONS);
}
else
diff --git a/server/Makefile.am b/server/Makefile.am
deleted file mode 100644
index 925a8c40f..000000000
--- a/server/Makefile.am
+++ /dev/null
@@ -1,5 +0,0 @@
-bin_PROGRAMS = mosesserver
-mosesserver_SOURCES = mosesserver.cpp
-mosesserver_CPPFLAGS = -W -Wall -I$(top_srcdir)/moses/src $(XMLRPC_C_CPPFLAGS) $(BOOST_CPPFLAGS)
-mosesserver_LDADD = -L$(top_srcdir)/moses/src -lmoses -L$(top_srcdir)/OnDiskPt/src -lOnDiskPt $(top_srcdir)/util/libkenutil.la $(top_srcdir)/lm/libkenlm.la $(BOOST_THREAD_LDFLAGS) $(XMLRPC_C_LIBS) $(BOOST_THREAD_LIBS)
-mosesserver_DEPENDENCIES = $(top_srcdir)/moses/src/libmoses.la $(top_srcdir)/OnDiskPt/src/libOnDiskPt.a
diff --git a/util/Jamfile b/util/Jamfile
new file mode 100644
index 000000000..b89149221
--- /dev/null
+++ b/util/Jamfile
@@ -0,0 +1,10 @@
+lib kenutil : bit_packing.cc ersatz_progress.cc exception.cc file.cc file_piece.cc mmap.cc murmur_hash.cc ..//z : <include>.. : : <include>.. ;
+
+import testing ;
+
+unit-test bit_packing_test : bit_packing_test.cc kenutil ..//boost_unit_test_framework ;
+run file_piece_test.cc kenutil ..//boost_unit_test_framework : : file_piece.cc ;
+unit-test joint_sort_test : joint_sort_test.cc kenutil ..//boost_unit_test_framework ;
+unit-test probing_hash_table_test : probing_hash_table_test.cc kenutil ..//boost_unit_test_framework ;
+unit-test sorted_uniform_test : sorted_uniform_test.cc kenutil ..//boost_unit_test_framework ;
+unit-test tokenize_piece_test : tokenize_piece_test.cc kenutil ..//boost_unit_test_framework ;
diff --git a/util/Makefile.am b/util/Makefile.am
deleted file mode 100644
index c567793ff..000000000
--- a/util/Makefile.am
+++ /dev/null
@@ -1,12 +0,0 @@
-lib_LTLIBRARIES = libkenutil.la
-
-AM_CPPFLAGS = -W -Wall -ffor-scope -D_FILE_OFFSET_BITS=64 -D_LARGE_FILES $(BOOST_CPPFLAGS)
-
-libkenutil_la_SOURCES = \
- bit_packing.cc \
- ersatz_progress.cc \
- exception.cc \
- file.cc \
- file_piece.cc \
- murmur_hash.cc \
- mmap.cc
diff --git a/util/check.hh b/util/check.hh
new file mode 100644
index 000000000..2c63b5630
--- /dev/null
+++ b/util/check.hh
@@ -0,0 +1,21 @@
+/* People have been abusing assert by assuming it will always execute. To
+ * remedy the situation, asserts were replaced with CHECK. These should then
+ * be manually replaced with assert (when used correctly) or UTIL_THROW (for
+ * runtime checks).
+ */
+#ifndef UTIL_CHECK__
+#define UTIL_CHECK__
+
+#include <stdlib.h>
+#include <iostream>
+
+#include <cassert>
+
+#define CHECK(Condition) do { \
+ if (!(Condition)) { \
+ std::cerr << "Check " << #Condition << " failed in " << __FILE__ << ":" << __LINE__ << std::endl; \
+ abort(); \
+ } \
+} while (0) // swallow ;
+
+#endif // UTIL_CHECK__
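
Unlike assert, the CHECK macro above always evaluates its condition, even in release builds, and on failure prints the condition together with the file and line before calling abort(). A small usage sketch, assuming util/check.hh from this patch:

    #include "util/check.hh"

    int main() {
      int buckets = 4;
      CHECK(buckets > 0);    // passes: nothing is printed
      CHECK(buckets > 100);  // fails: prints the condition and __FILE__:__LINE__, then abort()s
      return 0;
    }
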
diff --git a/util/file.cc b/util/file.cc
index 81d1d490c..77922cfad 100644
--- a/util/file.cc
+++ b/util/file.cc
@@ -1,7 +1,6 @@
#include "util/file.hh"
#include "util/exception.hh"
-#include "util/portability.hh"
#include <cstdlib>
#include <cstdio>
@@ -14,6 +13,7 @@
#if defined(_WIN32) || defined(_WIN64)
#include <windows.h>
+#include <io.h>
#endif
namespace util {
@@ -43,11 +43,28 @@ int OpenReadOrThrow(const char *name) {
}
uint64_t SizeFile(int fd) {
+#if defined(_WIN32) || defined(_WIN64)
+ __int64 ret = _filelengthi64(fd);
+ return (ret == -1) ? kBadSize : ret;
+#else
struct stat sb;
if (fstat(fd, &sb) == -1 || (!sb.st_size && !S_ISREG(sb.st_mode))) return kBadSize;
return sb.st_size;
+#endif
+}
+
+void ResizeOrThrow(int fd, uint64_t to) {
+#if defined(_WIN32) || defined(_WIN64)
+ UTIL_THROW_IF(_chsize_s(fd, to), ErrnoException, "Resizing to " << to << " bytes failed");
+#else
+ UTIL_THROW_IF(ftruncate(fd, to), ErrnoException, "Resizing to " << to << " bytes failed");
+#endif
}
+#ifdef WIN32
+typedef int ssize_t;
+#endif
+
void ReadOrThrow(int fd, void *to_void, std::size_t amount) {
uint8_t *to = static_cast<uint8_t*>(to_void);
while (amount) {
diff --git a/util/file.hh b/util/file.hh
index b820ba351..04023dec0 100644
--- a/util/file.hh
+++ b/util/file.hh
@@ -71,6 +71,8 @@ int OpenReadOrThrow(const char *name);
const uint64_t kBadSize = (uint64_t)-1;
uint64_t SizeFile(int fd);
+void ResizeOrThrow(int fd, uint64_t to);
+
void ReadOrThrow(int fd, void *to, std::size_t size);
std::size_t ReadOrEOF(int fd, void *to_void, std::size_t amount);
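
ResizeOrThrow complements SizeFile as the portable pair used by mmap.cc below: ftruncate on POSIX versus _chsize_s on Windows for resizing, fstat versus _filelengthi64 for sizing. A short sketch of the pair, assuming util/file.hh from this patch; the open() call is POSIX-only and the scratch file name is invented for illustration:

    #include "util/file.hh"

    #include <cassert>
    #include <fcntl.h>
    #include <unistd.h>

    int main() {
      int fd = open("scratch.bin", O_RDWR | O_CREAT, 0644);  // illustrative scratch file
      assert(fd != -1);
      util::ResizeOrThrow(fd, 4096);        // throws util::ErrnoException on failure
      assert(util::SizeFile(fd) == 4096);   // kBadSize would signal an error
      close(fd);
      return 0;
    }
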
diff --git a/util/file_piece.cc b/util/file_piece.cc
index 43b578b65..f0d49d555 100644
--- a/util/file_piece.cc
+++ b/util/file_piece.cc
@@ -3,7 +3,9 @@
#include "util/exception.hh"
#include "util/file.hh"
#include "util/mmap.hh"
-#include "util/portability.hh"
+#ifdef WIN32
+#include <io.h>
+#endif // WIN32
#include <iostream>
#include <string>
@@ -130,7 +132,7 @@ void FilePiece::Initialize(const char *name, std::ostream *show_progress, std::s
namespace {
void ParseNumber(const char *begin, char *&end, float &out) {
-#ifdef sun
+#if defined(sun) || defined(WIN32)
out = static_cast<float>(strtod(begin, &end));
#else
out = strtof(begin, &end);
@@ -255,6 +257,10 @@ void FilePiece::TransitionToRead() {
#endif
}
+#ifdef WIN32
+typedef int ssize_t;
+#endif
+
void FilePiece::ReadShift() {
assert(fallback_to_read_);
// Bytes [data_.begin(), position_) have been consumed.
diff --git a/util/file_piece_test.cc b/util/file_piece_test.cc
index dc9ec7e7c..f912e18af 100644
--- a/util/file_piece_test.cc
+++ b/util/file_piece_test.cc
@@ -1,3 +1,4 @@
+// Tests might fail if you have creative characters in your path. Sue me.
#include "util/file_piece.hh"
#include "util/scoped.hh"
@@ -14,10 +15,18 @@
namespace util {
namespace {
+std::string FileLocation() {
+ if (boost::unit_test::framework::master_test_suite().argc < 2) {
+ return "file_piece.cc";
+ }
+ std::string ret(boost::unit_test::framework::master_test_suite().argv[1]);
+ return ret;
+}
+
/* mmap implementation */
BOOST_AUTO_TEST_CASE(MMapReadLine) {
- std::fstream ref("file_piece.cc", std::ios::in);
- FilePiece test("file_piece.cc", NULL, 1);
+ std::fstream ref(FileLocation().c_str(), std::ios::in);
+ FilePiece test(FileLocation().c_str(), NULL, 1);
std::string ref_line;
while (getline(ref, ref_line)) {
StringPiece test_line(test.ReadLine());
@@ -35,9 +44,13 @@ BOOST_AUTO_TEST_CASE(MMapReadLine) {
*/
/* read() implementation */
BOOST_AUTO_TEST_CASE(StreamReadLine) {
- std::fstream ref("file_piece.cc", std::ios::in);
+ std::fstream ref(FileLocation().c_str(), std::ios::in);
+
+ std::string popen_args = "cat \"";
+ popen_args += FileLocation();
+ popen_args += '"';
- FILE *catter = popen("cat file_piece.cc", "r");
+ FILE *catter = popen(popen_args.c_str(), "r");
BOOST_REQUIRE(catter);
FilePiece test(dup(fileno(catter)), "file_piece.cc", NULL, 1);
@@ -58,10 +71,15 @@ BOOST_AUTO_TEST_CASE(StreamReadLine) {
// gzip file
BOOST_AUTO_TEST_CASE(PlainZipReadLine) {
- std::fstream ref("file_piece.cc", std::ios::in);
+ std::string location(FileLocation());
+ std::fstream ref(location.c_str(), std::ios::in);
- BOOST_REQUIRE_EQUAL(0, system("gzip <file_piece.cc >file_piece.cc.gz"));
- FilePiece test("file_piece.cc.gz", NULL, 1);
+ std::string command("gzip <\"");
+ command += location + "\" >\"" + location + "\".gz";
+
+ BOOST_REQUIRE_EQUAL(0, system(command.c_str()));
+ FilePiece test((location + ".gz").c_str(), NULL, 1);
+ unlink((location + ".gz").c_str());
std::string ref_line;
while (getline(ref, ref_line)) {
StringPiece test_line(test.ReadLine());
@@ -77,12 +95,15 @@ BOOST_AUTO_TEST_CASE(PlainZipReadLine) {
// the test.
#ifndef __APPLE__
BOOST_AUTO_TEST_CASE(StreamZipReadLine) {
- std::fstream ref("file_piece.cc", std::ios::in);
+ std::fstream ref(FileLocation().c_str(), std::ios::in);
+
+ std::string command("gzip <\"");
+ command += FileLocation() + "\"";
- FILE * catter = popen("gzip <file_piece.cc", "r");
+ FILE * catter = popen(command.c_str(), "r");
BOOST_REQUIRE(catter);
- FilePiece test(dup(fileno(catter)), "file_piece.cc", NULL, 1);
+ FilePiece test(dup(fileno(catter)), "file_piece.cc.gz", NULL, 1);
std::string ref_line;
while (getline(ref, ref_line)) {
StringPiece test_line(test.ReadLine());
diff --git a/util/getopt.c b/util/getopt.c
new file mode 100644
index 000000000..5dfe545de
--- /dev/null
+++ b/util/getopt.c
@@ -0,0 +1,77 @@
+/*
+POSIX getopt for Windows
+
+AT&T Public License
+
+Code given out at the 1985 UNIFORUM conference in Dallas.
+*/
+
+#ifndef __GNUC__
+
+#include "getopt.hh"
+#include <stdio.h>
+
+#define NULL 0
+#define EOF (-1)
+#define ERR(s, c) if(opterr){\
+ char errbuf[2];\
+ errbuf[0] = c; errbuf[1] = '\n';\
+ fputs(argv[0], stderr);\
+ fputs(s, stderr);\
+ fputc(c, stderr);}
+ //(void) write(2, argv[0], (unsigned)strlen(argv[0]));\
+ //(void) write(2, s, (unsigned)strlen(s));\
+ //(void) write(2, errbuf, 2);}
+
+int opterr = 1;
+int optind = 1;
+int optopt;
+char *optarg;
+
+int
+getopt(argc, argv, opts)
+int argc;
+char **argv, *opts;
+{
+ static int sp = 1;
+ register int c;
+ register char *cp;
+
+ if(sp == 1)
+ if(optind >= argc ||
+ argv[optind][0] != '-' || argv[optind][1] == '\0')
+ return(EOF);
+ else if(strcmp(argv[optind], "--") == 0) {
+ optind++;
+ return(EOF);
+ }
+ optopt = c = argv[optind][sp];
+ if(c == ':' || (cp=strchr(opts, c)) == NULL) {
+ ERR(": illegal option -- ", c);
+ if(argv[optind][++sp] == '\0') {
+ optind++;
+ sp = 1;
+ }
+ return('?');
+ }
+ if(*++cp == ':') {
+ if(argv[optind][sp+1] != '\0')
+ optarg = &argv[optind++][sp+1];
+ else if(++optind >= argc) {
+ ERR(": option requires an argument -- ", c);
+ sp = 1;
+ return('?');
+ } else
+ optarg = argv[optind++];
+ sp = 1;
+ } else {
+ if(argv[optind][++sp] == '\0') {
+ sp = 1;
+ optind++;
+ }
+ optarg = NULL;
+ }
+ return(c);
+}
+
+#endif /* __GNUC__ */ \ No newline at end of file
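
The fallback getopt above follows the classic AT&T behaviour: options are single characters, a ':' after a letter in the option string marks an option that takes an argument, and an unrecognized option prints a diagnostic via ERR and returns '?'. On GNU toolchains getopt.hh simply pulls in <getopt.h>. A brief usage sketch, assuming util/getopt.hh from this patch; the option string "o:v" is invented for illustration:

    #include "util/getopt.hh"

    #include <iostream>

    int main(int argc, char **argv) {
      const char *output = "-";
      bool verbose = false;
      int c;
      while ((c = getopt(argc, argv, (char *)"o:v")) != -1) {
        switch (c) {
          case 'o': output = optarg; break;  // "o:" takes an argument
          case 'v': verbose = true; break;   // plain flag
          default:  return 1;                // getopt has already reported the problem
        }
      }
      std::cout << "output=" << output << " verbose=" << verbose << std::endl;
      return 0;
    }
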
diff --git a/util/getopt.hh b/util/getopt.hh
index 33a706833..6ad977324 100755..100644
--- a/util/getopt.hh
+++ b/util/getopt.hh
@@ -1,190 +1,33 @@
+/*
+POSIX getopt for Windows
-/* getopt.h */
-/* Declarations for getopt.
- Copyright (C) 1989-1994, 1996-1999, 2001 Free Software
- Foundation, Inc. This file is part of the GNU C Library.
+AT&T Public License
- The GNU C Library is free software; you can redistribute
- it and/or modify it under the terms of the GNU Lesser
- General Public License as published by the Free Software
- Foundation; either version 2.1 of the License, or
- (at your option) any later version.
+Code given out at the 1985 UNIFORUM conference in Dallas.
+*/
- The GNU C Library is distributed in the hope that it will
- be useful, but WITHOUT ANY WARRANTY; without even the
- implied warranty of MERCHANTABILITY or FITNESS FOR A
- PARTICULAR PURPOSE. See the GNU Lesser General Public
- License for more details.
-
- You should have received a copy of the GNU Lesser General
- Public License along with the GNU C Library; if not, write
- to the Free Software Foundation, Inc., 59 Temple Place,
- Suite 330, Boston, MA 02111-1307 USA. */
-
-
-
-
-#ifndef _GETOPT_H
-
-#ifndef __need_getopt
-# define _GETOPT_H 1
+#ifdef __GNUC__
+#include <getopt.h>
#endif
+#ifndef __GNUC__
-/* If __GNU_LIBRARY__ is not already defined, either we are being used
- standalone, or this is the first header included in the source file.
- If we are being used with glibc, we need to include <features.h>, but
- that does not exist if we are standalone. So: if __GNU_LIBRARY__ is
- not defined, include <ctype.h>, which will pull in <features.h> for us
- if it's from glibc. (Why ctype.h? It's guaranteed to exist and it
- doesn't flood the namespace with stuff the way some other headers do.) */
-#if !defined __GNU_LIBRARY__
-# include <ctype.h>
-#endif
+#ifndef _WINGETOPT_H_
+#define _WINGETOPT_H_
-#ifdef __cplusplus
+#ifdef __cplusplus
extern "C" {
#endif
-int getopt (int argc, char *const *argv, const char *optstring);
-
-/* For communication from `getopt' to the caller.
- When `getopt' finds an option that takes an argument,
- the argument value is returned here.
- Also, when `ordering' is RETURN_IN_ORDER,
- each non-option ARGV-element is returned here. */
-
-extern char *optarg;
-
-/* Index in ARGV of the next element to be scanned.
- This is used for communication to and from the caller
- and for communication between successive calls to `getopt'.
-
- On entry to `getopt', zero means this is the first call; initialize.
-
- When `getopt' returns -1, this is the index of the first of the
- non-option elements that the caller should itself scan.
-
- Otherwise, `optind' communicates from one call to the next
- how much of ARGV has been scanned so far. */
-
-extern int optind;
-
-/* Callers store zero here to inhibit the error message `getopt' prints
- for unrecognized options. */
-
extern int opterr;
-
-/* Set to an option character which was unrecognized. */
-
+extern int optind;
extern int optopt;
+extern char *optarg;
+extern int getopt(int argc, char **argv, char *opts);
-#ifndef __need_getopt
-/* Describe the long-named options requested by the application.
- The LONG_OPTIONS argument to getopt_long or getopt_long_only is a vector
- of `struct option' terminated by an element containing a name which is
- zero.
-
- The field `has_arg' is:
- no_argument (or 0) if the option does not take an argument,
- required_argument (or 1) if the option requires an argument,
- optional_argument (or 2) if the option takes an optional argument.
-
- If the field `flag' is not NULL, it points to a variable that is set
- to the value given in the field `val' when the option is found, but
- left unchanged if the option is not found.
-
- To have a long-named option do something other than set an `int' to
- a compiled-in constant, such as set a value from `optarg', set the
- option's `flag' field to zero and its `val' field to a nonzero
- value (the equivalent single-letter option character, if there is
- one). For long options that have a zero `flag' field, `getopt'
- returns the contents of the `val' field. */
-
-struct option
-{
-# if (defined __STDC__ && __STDC__) || defined __cplusplus
- const char *name;
-# else
- char *name;
-# endif
- /* has_arg can't be an enum because some compilers complain about
- type mismatches in all the code that assumes it is an int. */
- int has_arg;
- int *flag;
- int val;
-};
-
-/* Names for the values of the `has_arg' field of `struct option'. */
-
-# define no_argument 0
-# define required_argument 1
-# define optional_argument 2
-#endif /* need getopt */
-
-
-/* Get definitions and prototypes for functions to process the
- arguments in ARGV (ARGC of them, minus the program name) for
- options given in OPTS.
-
- Return the option character from OPTS just read. Return -1 when
- there are no more options. For unrecognized options, or options
- missing arguments, `optopt' is set to the option letter, and '?' is
- returned.
-
- The OPTS string is a list of characters which are recognized option
- letters, optionally followed by colons, specifying that that letter
- takes an argument, to be placed in `optarg'.
-
- If a letter in OPTS is followed by two colons, its argument is
- optional. This behavior is specific to the GNU `getopt'.
-
- The argument `--' causes premature termination of argument
- scanning, explicitly telling `getopt' that there are no more
- options.
-
- If OPTS begins with `--', then non-option arguments are treated as
- arguments to the option '\0'. This behavior is specific to the GNU
- `getopt'. */
-
-#if (defined __STDC__ && __STDC__) || defined __cplusplus
-# ifdef __GNU_LIBRARY__
-/* Many other libraries have conflicting prototypes for getopt, with
- differences in the consts, in stdlib.h. To avoid compilation
- errors, only prototype getopt for the GNU C library. */
-extern int getopt (int ___argc, char *const *___argv, const char *__shortopts);
-# else /* not __GNU_LIBRARY__ */
-//extern int getopt ();
-# endif /* __GNU_LIBRARY__ */
-
-# ifndef __need_getopt
-extern int getopt_long (int ___argc, char *const *___argv,
- const char *__shortopts,
- const struct option *__longopts, int *__longind);
-extern int getopt_long_only (int ___argc, char *const *___argv,
- const char *__shortopts,
- const struct option *__longopts, int *__longind);
-
-/* Internal only. Users should not call this directly. */
-extern int _getopt_internal (int ___argc, char *const *___argv,
- const char *__shortopts,
- const struct option *__longopts, int *__longind,
- int __long_only);
-# endif
-#else /* not __STDC__ */
-extern int getopt ();
-# ifndef __need_getopt
-extern int getopt_long ();
-extern int getopt_long_only ();
-
-extern int _getopt_internal ();
-# endif
-#endif /* __STDC__ */
-
-#ifdef __cplusplus
+#ifdef __cplusplus
}
#endif
-/* Make sure we later can get all the definitions and declarations. */
-#undef __need_getopt
+#endif /* _WINGETOPT_H_ */
+#endif /* __GNUC__ */
-#endif /* getopt.h */
diff --git a/util/have.hh b/util/have.hh
index 9e1fc20cd..aca8c6264 100644
--- a/util/have.hh
+++ b/util/have.hh
@@ -1,9 +1,21 @@
-/* This ties kenlm's config into Moses's build system. If you are using kenlm
- * outside Moses, see http://kheafield.com/code/kenlm/developers/ .
- */
+/* Optional packages. You might want to integrate this with your build system e.g. config.h from ./configure. */
#ifndef UTIL_HAVE__
#define UTIL_HAVE__
+#ifndef HAVE_ZLIB
#define HAVE_ZLIB
+#endif
+
+#ifndef HAVE_ICU
+//#define HAVE_ICU
+#endif
+
+#ifndef HAVE_BOOST
+//#define HAVE_BOOST
+#endif
+
+#ifndef HAVE_THREADS
+//#define HAVE_THREADS
+#endif
#endif // UTIL_HAVE__
diff --git a/util/mmap.cc b/util/mmap.cc
index 3dfe0ab2c..d3a2526fa 100644
--- a/util/mmap.cc
+++ b/util/mmap.cc
@@ -13,13 +13,14 @@
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>
+#include <stdlib.h>
+
#if defined(_WIN32) || defined(_WIN64)
#include <windows.h>
+#include <io.h>
#else
#include <sys/mman.h>
#endif
-#include <stdlib.h>
-#include <unistd.h>
namespace util {
@@ -102,7 +103,7 @@ void *MapOrThrow(std::size_t size, bool for_write, int flags, bool prefault, int
int protectM = for_write ? FILE_MAP_WRITE : FILE_MAP_READ;
HANDLE hMapping = CreateFileMapping((HANDLE)_get_osfhandle(fd), NULL, protectC, 0, size + offset, NULL);
UTIL_THROW_IF(!hMapping, ErrnoException, "CreateFileMapping failed");
- ret = MapViewOfFile(hMapping, protectM, 0, offset, size);
+ LPVOID ret = MapViewOfFile(hMapping, protectM, 0, offset, size);
CloseHandle(hMapping);
UTIL_THROW_IF(!ret, ErrnoException, "MapViewOfFile failed");
#else
@@ -159,8 +160,8 @@ void *MapAnonymous(std::size_t size) {
}
void *MapZeroedWrite(int fd, std::size_t size) {
- UTIL_THROW_IF(-1 == ftruncate(fd, 0), ErrnoException, "ftruncate on fd " << fd << " to 0 failed");
- UTIL_THROW_IF(-1 == ftruncate(fd, size), ErrnoException, "ftruncate on fd " << fd << " to " << size << " failed");
+ ResizeOrThrow(fd, 0);
+ ResizeOrThrow(fd, size);
return MapOrThrow(size, true, kFileFlags, false, fd, 0);
}
diff --git a/util/portability.cc b/util/portability.cc
deleted file mode 100644
index 2efd74cba..000000000
--- a/util/portability.cc
+++ /dev/null
@@ -1,74 +0,0 @@
-
-#include <stdlib.h>
-#include <errno.h>
-#include "util/portability.hh"
-
-#ifdef WIN32
-
-int RUSAGE_SELF = 0;
-
-int sysconf(int) { return 0; }
-int msync(void*, int, int) { return 0; }
-int munmap(void *, int) { return 0; }
-void *mmap(void*, int, int, int, FD, OFF_T) { return 0; }
-int write(int, const void *, int) {return 0; }
-
-//FILE *popen(const char*, const char*) { return 0; }
-//int pclose(FILE *) { return 0; }
-int close(FD fd) { return 0; }
-
-
-// to be implemented by boost
-int mkdtemp(const char*) { return 0; }
-
-// done
-long lrint(float x)
-{
- long ret = (long) x;
- return ret;
-}
-
-float strtof(const char *begin, char **end)
-{
- double ret = strtod(begin, end);
- return (float) ret;
-}
-
-
-int ftruncate (FD hfile, unsigned int size)
-{
- unsigned int curpos;
- /*
- HANDLE hfile;
-
- if (fd < 0)
- {
- errno = EBADF;
- return -1;
- }
-
- hfile = (HANDLE) _get_osfhandle (fd);
- */
- curpos = SetFilePointer (hfile, 0, NULL, FILE_CURRENT);
- if (curpos == ~0
- || SetFilePointer (hfile, size, NULL, FILE_BEGIN) == ~0
- || !SetEndOfFile (hfile))
- {
- int error = GetLastError ();
- switch (error)
- {
- case ERROR_INVALID_HANDLE:
- errno = EBADF;
- break;
- default:
- errno = EIO;
- break;
- }
- return -1;
- }
- return 0;
-}
-
-#endif
-
-
diff --git a/util/portability.hh b/util/portability.hh
deleted file mode 100644
index acbc01922..000000000
--- a/util/portability.hh
+++ /dev/null
@@ -1,115 +0,0 @@
-
-#pragma once
-
-#include <assert.h>
-#include <stdint.h>
-
-#ifdef WIN32
-
-#include <windows.h>
-#include <direct.h>
-#include <io.h>
-#include <stdio.h>
-#include <string.h>
-#include <sys/stat.h>
-#include "util/getopt.hh"
-
-#undef max
-#undef min
-
-typedef HANDLE FD;
-
-const FD kBadFD = INVALID_HANDLE_VALUE;
-
-typedef int ssize_t;
-
-#define _SC_PAGE_SIZE 1
-#define MS_SYNC 1
-
-int sysconf(int);
-int msync(void*, int, int);
-int ftruncate(FD, unsigned int);
-
-long lrint(float);
-
-//inline int getrusage(int, struct rusage*) { return 0; }
-//extern int RUSAGE_SELF;
-
-typedef __int64 OFF_T;
-//#define OFF_T __int64
-
-#ifndef S_ISDIR
-#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
-#endif
-
-#ifndef S_ISREG
-#define S_ISREG(mode) (((mode) & S_IFMT) == S_IFREG)
-#endif
-
-int mkdtemp(const char*);
-int munmap(void *, int);
-void *mmap(void*, int, int, int, FD, OFF_T);
-
-#define PROT_READ 1
-#define PROT_WRITE 1
-#define MAP_FAILED (void*) 0x1
-#define MAP_SHARED 1
-#define MAP_ANON 1
-#define MAP_PRIVATE 1
-#define S_IRUSR 1
-#define S_IROTH 1
-#define S_IRGRP 1
-
-int write(int, const void *, int);
-#define S_IRUSR 1
-#define S_IWUSR 1
-
-//const char *strerror_r(int, const char *buf, int);
-
-float strtof(const char *begin, char **end);
-//FILE *popen(const char*, const char*);
-//int pclose(FILE *);
-int close(FD fd);
-
-#define dup(x) _dup(x)
-#define rmdir(x) _rmdir(x)
-#define strerror_r(errNum, buffer, numberOfElements) strerror_s(buffer, numberOfElements);
-
-#else // assume UNIX OS
-
-#include <stdint.h>
-#include <sys/resource.h>
-#include <sys/time.h>
-#include <sys/types.h>
-#include <sys/mman.h>
-#include <sys/stat.h>
-#include <unistd.h>
-
-typedef int FD;
-const FD kBadFD = -1;
-
-typedef off_t OFF_T;
-
-#endif
-
-#ifdef __GNUC__
-#define UTIL_FUNC_NAME __PRETTY_FUNCTION__
-#else
-#ifdef _WIN32
-#define UTIL_FUNC_NAME __FUNCTION__
-#else
-#define UTIL_FUNC_NAME NULL
-#endif
-#endif
-
-/* Bit-level packing routines */
-#ifdef __APPLE__
- #include <architecture/byte_order.h>
-#elif __linux__
- #include <endian.h>
-#elif WIN32
- // nothing
-#else
- #include <arpa/nameser_compat.h>
-#endif
-
diff --git a/util/probing_hash_table.hh b/util/probing_hash_table.hh
index 8122d69c5..f466cebc9 100644
--- a/util/probing_hash_table.hh
+++ b/util/probing_hash_table.hh
@@ -18,27 +18,33 @@ class ProbingSizeException : public Exception {
~ProbingSizeException() throw() {}
};
+// std::identity is an SGI extension :-(
+struct IdentityHash {
+ template <class T> T operator()(T arg) const { return arg; }
+};
+
/* Non-standard hash table
* Buckets must be set at the beginning and must be greater than maximum number
- * of elements, else an infinite loop happens.
+ * of elements, else it throws ProbingSizeException.
* Memory management and initialization is externalized to make it easier to
* serialize these to disk and load them quickly.
* Uses linear probing to find value.
* Only insert and lookup operations.
*/
-template <class PackingT, class HashT, class EqualT = std::equal_to<typename PackingT::Key> > class ProbingHashTable {
+template <class EntryT, class HashT, class EqualT = std::equal_to<typename EntryT::Key> > class ProbingHashTable {
public:
- typedef PackingT Packing;
- typedef typename Packing::Key Key;
- typedef typename Packing::MutableIterator MutableIterator;
- typedef typename Packing::ConstIterator ConstIterator;
-
+ typedef EntryT Entry;
+ typedef typename Entry::Key Key;
+ typedef const Entry *ConstIterator;
+ typedef Entry *MutableIterator;
typedef HashT Hash;
typedef EqualT Equal;
+ public:
static std::size_t Size(std::size_t entries, float multiplier) {
- return std::max(entries + 1, static_cast<std::size_t>(multiplier * static_cast<float>(entries))) * Packing::kBytes;
+ std::size_t buckets = std::max(entries + 1, static_cast<std::size_t>(multiplier * static_cast<float>(entries)));
+ return buckets * sizeof(Entry);
}
// Must be assigned to later.
@@ -49,9 +55,9 @@ template <class PackingT, class HashT, class EqualT = std::equal_to<typename Pac
{}
ProbingHashTable(void *start, std::size_t allocated, const Key &invalid = Key(), const Hash &hash_func = Hash(), const Equal &equal_func = Equal())
- : begin_(Packing::FromVoid(start)),
- buckets_(allocated / Packing::kBytes),
- end_(begin_ + (allocated / Packing::kBytes)),
+ : begin_(reinterpret_cast<MutableIterator>(start)),
+ buckets_(allocated / sizeof(Entry)),
+ end_(begin_ + buckets_),
invalid_(invalid),
hash_(hash_func),
equal_(equal_func),
@@ -62,11 +68,10 @@ template <class PackingT, class HashT, class EqualT = std::equal_to<typename Pac
{}
template <class T> MutableIterator Insert(const T &t) {
- if (++entries_ >= buckets_)
- UTIL_THROW(ProbingSizeException, "Hash table with " << buckets_ << " buckets is full.");
#ifdef DEBUG
assert(initialized_);
#endif
+ UTIL_THROW_IF(++entries_ >= buckets_, ProbingSizeException, "Hash table with " << buckets_ << " buckets is full.");
for (MutableIterator i(begin_ + (hash_(t.GetKey()) % buckets_));;) {
if (equal_(i->GetKey(), invalid_)) { *i = t; return i; }
if (++i == end_) { i = begin_; }
@@ -84,7 +89,7 @@ template <class PackingT, class HashT, class EqualT = std::equal_to<typename Pac
if (equal_(got, key)) { out = i; return true; }
if (equal_(got, invalid_)) return false;
if (++i == end_) i = begin_;
- }
+ }
}
template <class Key> bool Find(const Key key, ConstIterator &out) const {
diff --git a/util/probing_hash_table_test.cc b/util/probing_hash_table_test.cc
index ff2f5af31..ef68e5f22 100644
--- a/util/probing_hash_table_test.cc
+++ b/util/probing_hash_table_test.cc
@@ -1,6 +1,6 @@
#include "util/probing_hash_table.hh"
-#include "util/key_value_packing.hh"
+#include <stdint.h>
#define BOOST_TEST_MODULE ProbingHashTableTest
#include <boost/test/unit_test.hpp>
@@ -9,17 +9,34 @@
namespace util {
namespace {
-typedef AlignedPacking<char, uint64_t> Packing;
-typedef ProbingHashTable<Packing, boost::hash<char> > Table;
+struct Entry {
+ unsigned char key;
+ typedef unsigned char Key;
+
+ unsigned char GetKey() const {
+ return key;
+ }
+
+ uint64_t GetValue() const {
+ return value;
+ }
+
+ uint64_t value;
+};
+
+typedef ProbingHashTable<Entry, boost::hash<unsigned char> > Table;
BOOST_AUTO_TEST_CASE(simple) {
char mem[Table::Size(10, 1.2)];
memset(mem, 0, sizeof(mem));
Table table(mem, sizeof(mem));
- Packing::ConstIterator i = Packing::ConstIterator();
+ const Entry *i = NULL;
BOOST_CHECK(!table.Find(2, i));
- table.Insert(Packing::Make(3, 328920));
+ Entry to_ins;
+ to_ins.key = 3;
+ to_ins.value = 328920;
+ table.Insert(to_ins);
BOOST_REQUIRE(table.Find(3, i));
BOOST_CHECK_EQUAL(3, i->GetKey());
BOOST_CHECK_EQUAL(static_cast<uint64_t>(328920), i->GetValue());
diff --git a/util/sorted_uniform.hh b/util/sorted_uniform.hh
index 0391189f0..7700d9e64 100644
--- a/util/sorted_uniform.hh
+++ b/util/sorted_uniform.hh
@@ -122,99 +122,6 @@ template <class Iterator, class Accessor> Iterator BinaryBelow(
return begin - 1;
}
-// To use this template, you need to define a Pivot function to match Key.
-template <class PackingT> class SortedUniformMap {
- public:
- typedef PackingT Packing;
- typedef typename Packing::ConstIterator ConstIterator;
- typedef typename Packing::MutableIterator MutableIterator;
-
- struct Accessor {
- public:
- typedef typename Packing::Key Key;
- const Key &operator()(const ConstIterator &i) const { return i->GetKey(); }
- Key &operator()(const MutableIterator &i) const { return i->GetKey(); }
- };
-
- // Offer consistent API with probing hash.
- static std::size_t Size(std::size_t entries, float /*ignore*/ = 0.0) {
- return sizeof(uint64_t) + entries * Packing::kBytes;
- }
-
- SortedUniformMap()
-#ifdef DEBUG
- : initialized_(false), loaded_(false)
-#endif
- {}
-
- SortedUniformMap(void *start, std::size_t /*allocated*/) :
- begin_(Packing::FromVoid(reinterpret_cast<uint64_t*>(start) + 1)),
- end_(begin_), size_ptr_(reinterpret_cast<uint64_t*>(start))
-#ifdef DEBUG
- , initialized_(true), loaded_(false)
-#endif
- {}
-
- void LoadedBinary() {
-#ifdef DEBUG
- assert(initialized_);
- assert(!loaded_);
- loaded_ = true;
-#endif
- // Restore the size.
- end_ = begin_ + *size_ptr_;
- }
-
- // Caller responsible for not exceeding specified size. Do not call after FinishedInserting.
- template <class T> void Insert(const T &t) {
-#ifdef DEBUG
- assert(initialized_);
- assert(!loaded_);
-#endif
- *end_ = t;
- ++end_;
- }
-
- void FinishedInserting() {
-#ifdef DEBUG
- assert(initialized_);
- assert(!loaded_);
- loaded_ = true;
-#endif
- std::sort(begin_, end_);
- *size_ptr_ = (end_ - begin_);
- }
-
- // Don't use this to change the key.
- template <class Key> bool UnsafeMutableFind(const Key key, MutableIterator &out) {
-#ifdef DEBUG
- assert(initialized_);
- assert(loaded_);
-#endif
- return SortedUniformFind<MutableIterator, Accessor, Pivot64>(begin_, end_, key, out);
- }
-
- // Do not call before FinishedInserting.
- template <class Key> bool Find(const Key key, ConstIterator &out) const {
-#ifdef DEBUG
- assert(initialized_);
- assert(loaded_);
-#endif
- return SortedUniformFind<ConstIterator, Accessor, Pivot64>(Accessor(), ConstIterator(begin_), ConstIterator(end_), key, out);
- }
-
- ConstIterator begin() const { return begin_; }
- ConstIterator end() const { return end_; }
-
- private:
- typename Packing::MutableIterator begin_, end_;
- uint64_t *size_ptr_;
-#ifdef DEBUG
- bool initialized_;
- bool loaded_;
-#endif
-};
-
} // namespace util
#endif // UTIL_SORTED_UNIFORM__
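
With SortedUniformMap removed, callers keep their own sorted array and call SortedUniformFind directly, as the updated test below does. A minimal sketch of that pattern, assuming a caller-defined record and accessor (both illustrative stand-ins for the types the test defines):

#include "util/sorted_uniform.hh"

#include <algorithm>
#include <iostream>
#include <stdint.h>
#include <vector>

// Caller-owned record; only GetKey() and operator< are needed for this pattern.
struct Record {
  uint64_t key;
  float value;
  uint64_t GetKey() const { return key; }
  bool operator<(const Record &other) const { return key < other.key; }
};

// The accessor tells SortedUniformFind how to pull the key out of an iterator.
struct RecordAccessor {
  typedef uint64_t Key;
  Key operator()(const Record *r) const { return r->GetKey(); }
};

int main() {
  std::vector<Record> backing;
  Record r;
  r.key = 7;  r.value = 1.5f;  backing.push_back(r);
  r.key = 99; r.value = -2.0f; backing.push_back(r);
  std::sort(backing.begin(), backing.end());   // replaces SortedUniformMap::FinishedInserting()

  const Record *found = NULL;
  bool ok = util::SortedUniformFind<const Record*, RecordAccessor, util::Pivot64>(
      RecordAccessor(), &backing[0], &backing[0] + backing.size(),
      static_cast<uint64_t>(99), found);
  if (ok) std::cout << found->value << '\n';
  return 0;
}
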
diff --git a/util/sorted_uniform_test.cc b/util/sorted_uniform_test.cc
index 4aa4c8aad..ac7a0bfc5 100644
--- a/util/sorted_uniform_test.cc
+++ b/util/sorted_uniform_test.cc
@@ -1,7 +1,5 @@
#include "util/sorted_uniform.hh"
-#include "util/key_value_packing.hh"
-
#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_int.hpp>
#include <boost/random/variate_generator.hpp>
@@ -17,74 +15,86 @@
namespace util {
namespace {
-template <class Map, class Key, class Value> void Check(const Map &map, const boost::unordered_map<Key, Value> &reference, const Key key) {
+template <class KeyT, class ValueT> struct Entry {
+ typedef KeyT Key;
+ typedef ValueT Value;
+
+ Key key;
+ Value value;
+
+ Key GetKey() const {
+ return key;
+ }
+
+ Value GetValue() const {
+ return value;
+ }
+
+ bool operator<(const Entry<Key,Value> &other) const {
+ return key < other.key;
+ }
+};
+
+template <class KeyT> struct Accessor {
+ typedef KeyT Key;
+ template <class Value> Key operator()(const Entry<Key, Value> *entry) const {
+ return entry->GetKey();
+ }
+};
+
+template <class Key, class Value> void Check(const Entry<Key, Value> *begin, const Entry<Key, Value> *end, const boost::unordered_map<Key, Value> &reference, const Key key) {
typename boost::unordered_map<Key, Value>::const_iterator ref = reference.find(key);
- typename Map::ConstIterator i = typename Map::ConstIterator();
+ typedef const Entry<Key, Value> *It;
+ // g++ can't tell that require will crash and burn.
+ It i = NULL;
+ bool ret = SortedUniformFind<It, Accessor<Key>, Pivot64>(Accessor<Key>(), begin, end, key, i);
if (ref == reference.end()) {
- BOOST_CHECK(!map.Find(key, i));
+ BOOST_CHECK(!ret);
} else {
- // g++ can't tell that require will crash and burn.
- BOOST_REQUIRE(map.Find(key, i));
+ BOOST_REQUIRE(ret);
BOOST_CHECK_EQUAL(ref->second, i->GetValue());
}
}
-typedef SortedUniformMap<AlignedPacking<uint64_t, uint32_t> > TestMap;
-
BOOST_AUTO_TEST_CASE(empty) {
- char buf[TestMap::Size(0)];
- TestMap map(buf, TestMap::Size(0));
- map.FinishedInserting();
- TestMap::ConstIterator i;
- BOOST_CHECK(!map.Find(42, i));
-}
-
-BOOST_AUTO_TEST_CASE(one) {
- char buf[TestMap::Size(1)];
- TestMap map(buf, sizeof(buf));
- Entry<uint64_t, uint32_t> e;
- e.Set(42,2);
- map.Insert(e);
- map.FinishedInserting();
- TestMap::ConstIterator i = TestMap::ConstIterator();
- BOOST_REQUIRE(map.Find(42, i));
- BOOST_CHECK(i == map.begin());
- BOOST_CHECK(!map.Find(43, i));
- BOOST_CHECK(!map.Find(41, i));
+ typedef const Entry<uint64_t, float> T;
+ const T *i;
+ bool ret = SortedUniformFind<const T*, Accessor<uint64_t>, Pivot64>(Accessor<uint64_t>(), (const T*)NULL, (const T*)NULL, (uint64_t)10, i);
+ BOOST_CHECK(!ret);
}
template <class Key> void RandomTest(Key upper, size_t entries, size_t queries) {
typedef unsigned char Value;
- typedef SortedUniformMap<AlignedPacking<Key, unsigned char> > Map;
- boost::scoped_array<char> buffer(new char[Map::Size(entries)]);
- Map map(buffer.get(), entries);
boost::mt19937 rng;
boost::uniform_int<Key> range_key(0, upper);
boost::uniform_int<Value> range_value(0, 255);
boost::variate_generator<boost::mt19937&, boost::uniform_int<Key> > gen_key(rng, range_key);
boost::variate_generator<boost::mt19937&, boost::uniform_int<unsigned char> > gen_value(rng, range_value);
+ typedef Entry<Key, Value> Ent;
+ std::vector<Ent> backing;
boost::unordered_map<Key, unsigned char> reference;
- Entry<Key, unsigned char> ent;
+ Ent ent;
for (size_t i = 0; i < entries; ++i) {
Key key = gen_key();
unsigned char value = gen_value();
if (reference.insert(std::make_pair(key, value)).second) {
- ent.Set(key, value);
- map.Insert(Entry<Key, unsigned char>(ent));
+ ent.key = key;
+ ent.value = value;
+ backing.push_back(ent);
}
}
- map.FinishedInserting();
+ std::sort(backing.begin(), backing.end());
// Random queries.
for (size_t i = 0; i < queries; ++i) {
const Key key = gen_key();
- Check<Map, Key, unsigned char>(map, reference, key);
+ Check<Key, unsigned char>(&*backing.begin(), &*backing.end(), reference, key);
}
typename boost::unordered_map<Key, unsigned char>::const_iterator it = reference.begin();
for (size_t i = 0; (i < queries) && (it != reference.end()); ++i, ++it) {
- Check<Map, Key, unsigned char>(map, reference, it->second);
+ Check<Key, unsigned char>(&*backing.begin(), &*backing.end(), reference, it->second);
}
}
diff --git a/util/tokenize_piece.hh b/util/tokenize_piece.hh
index 413bda0b9..c7e1c8633 100644
--- a/util/tokenize_piece.hh
+++ b/util/tokenize_piece.hh
@@ -1,6 +1,7 @@
#ifndef UTIL_TOKENIZE_PIECE__
#define UTIL_TOKENIZE_PIECE__
+#include "util/exception.hh"
#include "util/string_piece.hh"
#include <boost/iterator/iterator_facade.hpp>
@@ -8,63 +9,25 @@
#include <algorithm>
#include <iostream>
-/* Usage:
- *
- * for (PieceIterator<' '> i(" foo \r\n bar "); i; ++i) {
- * std::cout << *i << "\n";
- * }
- *
- */
-
namespace util {
-// Tokenize a StringPiece using an iterator interface. boost::tokenizer doesn't work with StringPiece.
-template <char d> class PieceIterator : public boost::iterator_facade<PieceIterator<d>, const StringPiece, boost::forward_traversal_tag> {
+// Thrown on dereference when out of tokens to parse
+class OutOfTokens : public Exception {
public:
- // Default construct is end, which is also accessed by kEndPieceIterator;
- PieceIterator() {}
-
- explicit PieceIterator(const StringPiece &str)
- : after_(str) {
- increment();
- }
+ OutOfTokens() throw() {}
+ ~OutOfTokens() throw() {}
+};
- bool operator!() const {
- return after_.data() == 0;
- }
- operator bool() const {
- return after_.data() != 0;
- }
+class SingleCharacter {
+ public:
+ explicit SingleCharacter(char delim) : delim_(delim) {}
- static PieceIterator<d> end() {
- return PieceIterator<d>();
+ StringPiece Find(const StringPiece &in) const {
+ return StringPiece(std::find(in.data(), in.data() + in.size(), delim_), 1);
}
private:
- friend class boost::iterator_core_access;
-
- void increment() {
- const char *start = after_.data();
- for (; (start != after_.data() + after_.size()) && (d == *start); ++start) {}
- if (start == after_.data() + after_.size()) {
- // End condition.
- after_.clear();
- return;
- }
- const char *finish = start;
- for (; (finish != after_.data() + after_.size()) && (d != *finish); ++finish) {}
- current_ = StringPiece(start, finish - start);
- after_ = StringPiece(finish, after_.data() + after_.size() - finish);
- }
-
- bool equal(const PieceIterator &other) const {
- return after_.data() == other.after_.data();
- }
-
- const StringPiece &dereference() const { return current_; }
-
- StringPiece current_;
- StringPiece after_;
+ char delim_;
};
class MultiCharacter {
@@ -95,7 +58,7 @@ template <class Find, bool SkipEmpty = false> class TokenIter : public boost::it
public:
TokenIter() {}
- TokenIter(const StringPiece &str, const Find &finder) : after_(str), finder_(finder) {
+ template <class Construct> TokenIter(const StringPiece &str, const Construct &construct) : after_(str), finder_(construct) {
increment();
}
@@ -130,6 +93,7 @@ template <class Find, bool SkipEmpty = false> class TokenIter : public boost::it
}
const StringPiece &dereference() const {
+ UTIL_THROW_IF(!current_.data(), OutOfTokens, "Ran out of tokens");
return current_;
}
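
The removed PieceIterator usage comment has no direct replacement in the header, so the equivalent loops are sketched below based only on the classes above and the updated tests; the delimiter choices and the SkipEmpty behaviour noted in the comments are assumptions, not text from the patch:

#include "util/tokenize_piece.hh"
#include "util/string_piece.hh"

#include <iostream>

int main() {
  // SkipEmpty = true gives behaviour close to the old PieceIterator<' '>,
  // which silently skipped runs of delimiters.
  for (util::TokenIter<util::SingleCharacter, true> it(" single  spaced words. ", util::SingleCharacter(' ')); it; ++it) {
    std::cout << *it << '\n';
  }

  // Multi-character delimiters still go through MultiCharacter, as in the tests.
  for (util::TokenIter<util::MultiCharacter> it("a|||b", util::MultiCharacter("|||")); it; ++it) {
    std::cout << *it << '\n';
  }

  // Dereferencing once the tokens are exhausted now throws util::OutOfTokens
  // instead of returning an empty piece.
  return 0;
}
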
diff --git a/util/tokenize_piece_test.cc b/util/tokenize_piece_test.cc
index e07ebcf5e..d856018fb 100644
--- a/util/tokenize_piece_test.cc
+++ b/util/tokenize_piece_test.cc
@@ -9,53 +9,7 @@
namespace util {
namespace {
-BOOST_AUTO_TEST_CASE(simple) {
- PieceIterator<' '> it("single spaced words.");
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("single"), *it);
- ++it;
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("spaced"), *it);
- ++it;
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("words."), *it);
- ++it;
- BOOST_CHECK(!it);
-}
-
-BOOST_AUTO_TEST_CASE(null_delimiter) {
- const char str[] = "\0first\0\0second\0\0\0third\0fourth\0\0\0";
- PieceIterator<'\0'> it(StringPiece(str, sizeof(str) - 1));
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("first"), *it);
- ++it;
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("second"), *it);
- ++it;
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("third"), *it);
- ++it;
- BOOST_REQUIRE(it);
- BOOST_CHECK_EQUAL(StringPiece("fourth"), *it);
- ++it;
- BOOST_CHECK(!it);
-}
-
-BOOST_AUTO_TEST_CASE(null_entries) {
- const char str[] = "\0split\0\0 \0me\0 ";
- PieceIterator<' '> it(StringPiece(str, sizeof(str) - 1));
- BOOST_REQUIRE(it);
- const char first[] = "\0split\0\0";
- BOOST_CHECK_EQUAL(StringPiece(first, sizeof(first) - 1), *it);
- ++it;
- BOOST_REQUIRE(it);
- const char second[] = "\0me\0";
- BOOST_CHECK_EQUAL(StringPiece(second, sizeof(second) - 1), *it);
- ++it;
- BOOST_CHECK(!it);
-}
-
-/*BOOST_AUTO_TEST_CASE(pipe_pipe_none) {
+BOOST_AUTO_TEST_CASE(pipe_pipe_none) {
const char str[] = "nodelimit at all";
TokenIter<MultiCharacter> it(str, MultiCharacter("|||"));
BOOST_REQUIRE(it);
@@ -79,7 +33,7 @@ BOOST_AUTO_TEST_CASE(remove_empty) {
const char str[] = "|||";
TokenIter<MultiCharacter, true> it(str, MultiCharacter("|||"));
BOOST_CHECK(!it);
-}*/
+}
BOOST_AUTO_TEST_CASE(remove_empty_keep) {
const char str[] = " |||";
diff --git a/util/util.xcodeproj/project.pbxproj b/util/util.xcodeproj/project.pbxproj
new file mode 100644
index 000000000..2f8134a39
--- /dev/null
+++ b/util/util.xcodeproj/project.pbxproj
@@ -0,0 +1,298 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1EE8C2A41476A2E9002496F2 /* bit_packing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C27C1476A2E9002496F2 /* bit_packing.cc */; };
+ 1EE8C2A51476A2EA002496F2 /* bit_packing.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C27D1476A2E9002496F2 /* bit_packing.hh */; };
+ 1EE8C2A61476A2EA002496F2 /* check.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C27E1476A2E9002496F2 /* check.hh */; };
+ 1EE8C2A71476A2EA002496F2 /* ersatz_progress.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C2811476A2E9002496F2 /* ersatz_progress.cc */; };
+ 1EE8C2A81476A2EA002496F2 /* ersatz_progress.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2821476A2E9002496F2 /* ersatz_progress.hh */; };
+ 1EE8C2A91476A2EA002496F2 /* exception.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C2831476A2E9002496F2 /* exception.cc */; };
+ 1EE8C2AA1476A2EA002496F2 /* exception.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2841476A2E9002496F2 /* exception.hh */; };
+ 1EE8C2AC1476A2EA002496F2 /* file_piece.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C2861476A2E9002496F2 /* file_piece.cc */; };
+ 1EE8C2AD1476A2EA002496F2 /* file_piece.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2871476A2E9002496F2 /* file_piece.hh */; };
+ 1EE8C2AE1476A2EA002496F2 /* file.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C2881476A2E9002496F2 /* file.cc */; };
+ 1EE8C2AF1476A2EA002496F2 /* file.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2891476A2E9002496F2 /* file.hh */; };
+ 1EE8C2B01476A2EA002496F2 /* getopt.c in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C28A1476A2E9002496F2 /* getopt.c */; };
+ 1EE8C2B11476A2EA002496F2 /* getopt.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C28B1476A2E9002496F2 /* getopt.hh */; };
+ 1EE8C2B21476A2EA002496F2 /* have.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C28C1476A2E9002496F2 /* have.hh */; };
+ 1EE8C2B41476A2EA002496F2 /* joint_sort.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C28E1476A2E9002496F2 /* joint_sort.hh */; };
+ 1EE8C2B61476A2EA002496F2 /* key_value_packing.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2901476A2E9002496F2 /* key_value_packing.hh */; };
+ 1EE8C2B81476A2EA002496F2 /* mmap.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C2951476A2E9002496F2 /* mmap.cc */; };
+ 1EE8C2B91476A2EA002496F2 /* mmap.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2961476A2E9002496F2 /* mmap.hh */; };
+ 1EE8C2BA1476A2EA002496F2 /* murmur_hash.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1EE8C2971476A2E9002496F2 /* murmur_hash.cc */; };
+ 1EE8C2BB1476A2EA002496F2 /* murmur_hash.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2981476A2E9002496F2 /* murmur_hash.hh */; };
+ 1EE8C2BD1476A2EA002496F2 /* probing_hash_table.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C29A1476A2E9002496F2 /* probing_hash_table.hh */; };
+ 1EE8C2BE1476A2EA002496F2 /* proxy_iterator.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C29B1476A2E9002496F2 /* proxy_iterator.hh */; };
+ 1EE8C2BF1476A2EA002496F2 /* scoped.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C29C1476A2E9002496F2 /* scoped.hh */; };
+ 1EE8C2C01476A2EA002496F2 /* sized_iterator.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C29D1476A2E9002496F2 /* sized_iterator.hh */; };
+ 1EE8C2C21476A2EA002496F2 /* sorted_uniform.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C29F1476A2E9002496F2 /* sorted_uniform.hh */; };
+ 1EE8C2C31476A2EA002496F2 /* string_piece.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2A01476A2E9002496F2 /* string_piece.hh */; };
+ 1EE8C2C51476A2EA002496F2 /* tokenize_piece.hh in Headers */ = {isa = PBXBuildFile; fileRef = 1EE8C2A21476A2E9002496F2 /* tokenize_piece.hh */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+ 1EE8C2711476A262002496F2 /* libutil.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libutil.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ 1EE8C27C1476A2E9002496F2 /* bit_packing.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = bit_packing.cc; sourceTree = "<group>"; };
+ 1EE8C27D1476A2E9002496F2 /* bit_packing.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = bit_packing.hh; sourceTree = "<group>"; };
+ 1EE8C27E1476A2E9002496F2 /* check.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = check.hh; sourceTree = "<group>"; };
+ 1EE8C2811476A2E9002496F2 /* ersatz_progress.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ersatz_progress.cc; sourceTree = "<group>"; };
+ 1EE8C2821476A2E9002496F2 /* ersatz_progress.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = ersatz_progress.hh; sourceTree = "<group>"; };
+ 1EE8C2831476A2E9002496F2 /* exception.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = exception.cc; sourceTree = "<group>"; };
+ 1EE8C2841476A2E9002496F2 /* exception.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = exception.hh; sourceTree = "<group>"; };
+ 1EE8C2861476A2E9002496F2 /* file_piece.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = file_piece.cc; sourceTree = "<group>"; };
+ 1EE8C2871476A2E9002496F2 /* file_piece.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = file_piece.hh; sourceTree = "<group>"; };
+ 1EE8C2881476A2E9002496F2 /* file.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = file.cc; sourceTree = "<group>"; };
+ 1EE8C2891476A2E9002496F2 /* file.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = file.hh; sourceTree = "<group>"; };
+ 1EE8C28A1476A2E9002496F2 /* getopt.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = getopt.c; sourceTree = "<group>"; };
+ 1EE8C28B1476A2E9002496F2 /* getopt.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = getopt.hh; sourceTree = "<group>"; };
+ 1EE8C28C1476A2E9002496F2 /* have.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = have.hh; sourceTree = "<group>"; };
+ 1EE8C28E1476A2E9002496F2 /* joint_sort.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = joint_sort.hh; sourceTree = "<group>"; };
+ 1EE8C2901476A2E9002496F2 /* key_value_packing.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = key_value_packing.hh; sourceTree = "<group>"; };
+ 1EE8C2951476A2E9002496F2 /* mmap.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = mmap.cc; sourceTree = "<group>"; };
+ 1EE8C2961476A2E9002496F2 /* mmap.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = mmap.hh; sourceTree = "<group>"; };
+ 1EE8C2971476A2E9002496F2 /* murmur_hash.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = murmur_hash.cc; sourceTree = "<group>"; };
+ 1EE8C2981476A2E9002496F2 /* murmur_hash.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = murmur_hash.hh; sourceTree = "<group>"; };
+ 1EE8C29A1476A2E9002496F2 /* probing_hash_table.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = probing_hash_table.hh; sourceTree = "<group>"; };
+ 1EE8C29B1476A2E9002496F2 /* proxy_iterator.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = proxy_iterator.hh; sourceTree = "<group>"; };
+ 1EE8C29C1476A2E9002496F2 /* scoped.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = scoped.hh; sourceTree = "<group>"; };
+ 1EE8C29D1476A2E9002496F2 /* sized_iterator.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = sized_iterator.hh; sourceTree = "<group>"; };
+ 1EE8C29F1476A2E9002496F2 /* sorted_uniform.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = sorted_uniform.hh; sourceTree = "<group>"; };
+ 1EE8C2A01476A2E9002496F2 /* string_piece.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = string_piece.hh; sourceTree = "<group>"; };
+ 1EE8C2A21476A2E9002496F2 /* tokenize_piece.hh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = tokenize_piece.hh; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 1EE8C26E1476A262002496F2 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 1EE8C2661476A262002496F2 = {
+ isa = PBXGroup;
+ children = (
+ 1EE8C27C1476A2E9002496F2 /* bit_packing.cc */,
+ 1EE8C27D1476A2E9002496F2 /* bit_packing.hh */,
+ 1EE8C27E1476A2E9002496F2 /* check.hh */,
+ 1EE8C2811476A2E9002496F2 /* ersatz_progress.cc */,
+ 1EE8C2821476A2E9002496F2 /* ersatz_progress.hh */,
+ 1EE8C2831476A2E9002496F2 /* exception.cc */,
+ 1EE8C2841476A2E9002496F2 /* exception.hh */,
+ 1EE8C2861476A2E9002496F2 /* file_piece.cc */,
+ 1EE8C2871476A2E9002496F2 /* file_piece.hh */,
+ 1EE8C2881476A2E9002496F2 /* file.cc */,
+ 1EE8C2891476A2E9002496F2 /* file.hh */,
+ 1EE8C28A1476A2E9002496F2 /* getopt.c */,
+ 1EE8C28B1476A2E9002496F2 /* getopt.hh */,
+ 1EE8C28C1476A2E9002496F2 /* have.hh */,
+ 1EE8C28E1476A2E9002496F2 /* joint_sort.hh */,
+ 1EE8C2901476A2E9002496F2 /* key_value_packing.hh */,
+ 1EE8C2951476A2E9002496F2 /* mmap.cc */,
+ 1EE8C2961476A2E9002496F2 /* mmap.hh */,
+ 1EE8C2971476A2E9002496F2 /* murmur_hash.cc */,
+ 1EE8C2981476A2E9002496F2 /* murmur_hash.hh */,
+ 1EE8C29A1476A2E9002496F2 /* probing_hash_table.hh */,
+ 1EE8C29B1476A2E9002496F2 /* proxy_iterator.hh */,
+ 1EE8C29C1476A2E9002496F2 /* scoped.hh */,
+ 1EE8C29D1476A2E9002496F2 /* sized_iterator.hh */,
+ 1EE8C29F1476A2E9002496F2 /* sorted_uniform.hh */,
+ 1EE8C2A01476A2E9002496F2 /* string_piece.hh */,
+ 1EE8C2A21476A2E9002496F2 /* tokenize_piece.hh */,
+ 1EE8C2721476A262002496F2 /* Products */,
+ );
+ sourceTree = "<group>";
+ };
+ 1EE8C2721476A262002496F2 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 1EE8C2711476A262002496F2 /* libutil.a */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXHeadersBuildPhase section */
+ 1EE8C26F1476A262002496F2 /* Headers */ = {
+ isa = PBXHeadersBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1EE8C2A51476A2EA002496F2 /* bit_packing.hh in Headers */,
+ 1EE8C2A61476A2EA002496F2 /* check.hh in Headers */,
+ 1EE8C2A81476A2EA002496F2 /* ersatz_progress.hh in Headers */,
+ 1EE8C2AA1476A2EA002496F2 /* exception.hh in Headers */,
+ 1EE8C2AD1476A2EA002496F2 /* file_piece.hh in Headers */,
+ 1EE8C2AF1476A2EA002496F2 /* file.hh in Headers */,
+ 1EE8C2B11476A2EA002496F2 /* getopt.hh in Headers */,
+ 1EE8C2B21476A2EA002496F2 /* have.hh in Headers */,
+ 1EE8C2B41476A2EA002496F2 /* joint_sort.hh in Headers */,
+ 1EE8C2B61476A2EA002496F2 /* key_value_packing.hh in Headers */,
+ 1EE8C2B91476A2EA002496F2 /* mmap.hh in Headers */,
+ 1EE8C2BB1476A2EA002496F2 /* murmur_hash.hh in Headers */,
+ 1EE8C2BD1476A2EA002496F2 /* probing_hash_table.hh in Headers */,
+ 1EE8C2BE1476A2EA002496F2 /* proxy_iterator.hh in Headers */,
+ 1EE8C2BF1476A2EA002496F2 /* scoped.hh in Headers */,
+ 1EE8C2C01476A2EA002496F2 /* sized_iterator.hh in Headers */,
+ 1EE8C2C21476A2EA002496F2 /* sorted_uniform.hh in Headers */,
+ 1EE8C2C31476A2EA002496F2 /* string_piece.hh in Headers */,
+ 1EE8C2C51476A2EA002496F2 /* tokenize_piece.hh in Headers */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXHeadersBuildPhase section */
+
+/* Begin PBXNativeTarget section */
+ 1EE8C2701476A262002496F2 /* util */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 1EE8C2751476A262002496F2 /* Build configuration list for PBXNativeTarget "util" */;
+ buildPhases = (
+ 1EE8C26D1476A262002496F2 /* Sources */,
+ 1EE8C26E1476A262002496F2 /* Frameworks */,
+ 1EE8C26F1476A262002496F2 /* Headers */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = util;
+ productName = util;
+ productReference = 1EE8C2711476A262002496F2 /* libutil.a */;
+ productType = "com.apple.product-type.library.static";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 1EE8C2681476A262002496F2 /* Project object */ = {
+ isa = PBXProject;
+ buildConfigurationList = 1EE8C26B1476A262002496F2 /* Build configuration list for PBXProject "util" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ );
+ mainGroup = 1EE8C2661476A262002496F2;
+ productRefGroup = 1EE8C2721476A262002496F2 /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 1EE8C2701476A262002496F2 /* util */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 1EE8C26D1476A262002496F2 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1EE8C2A41476A2E9002496F2 /* bit_packing.cc in Sources */,
+ 1EE8C2A71476A2EA002496F2 /* ersatz_progress.cc in Sources */,
+ 1EE8C2A91476A2EA002496F2 /* exception.cc in Sources */,
+ 1EE8C2AC1476A2EA002496F2 /* file_piece.cc in Sources */,
+ 1EE8C2AE1476A2EA002496F2 /* file.cc in Sources */,
+ 1EE8C2B01476A2EA002496F2 /* getopt.c in Sources */,
+ 1EE8C2B81476A2EA002496F2 /* mmap.cc in Sources */,
+ 1EE8C2BA1476A2EA002496F2 /* murmur_hash.cc in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin XCBuildConfiguration section */
+ 1EE8C2731476A262002496F2 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+ COPY_PHASE_STRIP = NO;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_ENABLE_OBJC_EXCEPTIONS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_SYMBOLS_PRIVATE_EXTERN = NO;
+ GCC_VERSION = com.apple.compilers.llvm.clang.1_0;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.7;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = macosx;
+ };
+ name = Debug;
+ };
+ 1EE8C2741476A262002496F2 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+ COPY_PHASE_STRIP = YES;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_ENABLE_OBJC_EXCEPTIONS = YES;
+ GCC_VERSION = com.apple.compilers.llvm.clang.1_0;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.7;
+ SDKROOT = macosx;
+ };
+ name = Release;
+ };
+ 1EE8C2761476A262002496F2 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ EXECUTABLE_PREFIX = lib;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Debug;
+ };
+ 1EE8C2771476A262002496F2 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ EXECUTABLE_PREFIX = lib;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 1EE8C26B1476A262002496F2 /* Build configuration list for PBXProject "util" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 1EE8C2731476A262002496F2 /* Debug */,
+ 1EE8C2741476A262002496F2 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 1EE8C2751476A262002496F2 /* Build configuration list for PBXNativeTarget "util" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 1EE8C2761476A262002496F2 /* Debug */,
+ 1EE8C2771476A262002496F2 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 1EE8C2681476A262002496F2 /* Project object */;
+}
diff --git a/validate_more_revisions.sh b/validate_more_revisions.sh
deleted file mode 100755
index 86a914dbe..000000000
--- a/validate_more_revisions.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-from=$1
-to=$2
-
-logdir=./revision_status_log
-
-if [ "$from" == "" ] || [ "$to" == "" ]; then
- cat <<KONEC
-./validate_more_revisions.sh <fromrev> <torev>
- will run ./validate_revision.sh for a sequence of revisions and
- collect logfiles to "$logdir"
-KONEC
- exit 1
-fi
-
-
-mkdir -p $logdir
-
-tmpdir=/tmp/validate-more-revisions-tmp
-if [ -e $tmpdir ]; then
- echo "$0 seems to be already running!"
- echo "If this is a false alarm, remove our temp directory:"
- echo " rm -rf $tmpdir"
- exit 1;
-fi
-
-mkdir -p $tmpdir
-
-# need to save a copy of the helper script validate_revision.sh,
-# because previous releases might have missed it
-cp ./validate_revision.sh $tmpdir/
-
-for i in `seq $from $to`; do
- echo "Validating $i...";
- $tmpdir/validate_revision.sh $i > $logdir/$i.log 2>&1
- tail -1 $logdir/$i.log | sed 's/^/ /'
-done
-rm -rf $tmpdir
-echo "Finished validating, now at revision $to"
diff --git a/validate_revision.sh b/validate_revision.sh
deleted file mode 100755
index a29eeed7c..000000000
--- a/validate_revision.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-
-rev="$1"
-
-if [ "$rev" == "" ]; then
- cat << KONEC
-./validate_revision.sh <revnumber>
- This will check, if the given revision was compilable (using irstlm).
- These tasks will be performed:
- svn update -r <revnumber>
- compile and install irstlm to a temp directory
- compile moses with irstlm
- delete the temp directory
-KONEC
- exit 1;
-fi
-
-tempdir=/tmp/validatemoses
-
-function die() {
- rm -rf $tempdir
- echo "$@"
- exit 1
-}
-
-if svn status | grep '^[^\?]'; then
- die "Will not go to a different revision, please synchronize with a revision in repository first"
-fi
-
-svn up -r $rev || die "Failed to update to rev. $rev"
-# dump the information
-svn info
-
-./regenerate-makefiles.sh || die "Failed to regenerate makefiles in mosesdecoder"
-
-
-cd irstlm || die "Failed to chdir to irstlm"
-./regenerate-makefiles.sh || die "Failed to regenerate makefiles in irstlm"
-./configure --prefix=$tempdir/irstlm || die "Failed to configure irstlm"
-make clean || die "Failed to clean irstlm"
-make || die "Failed to compile irstlm"
-make install || die "Failed to install irstlm"
-cd ..
-
-./configure --with-irstlm=$tempdir/irstlm || die "Failed to configure moses"
-make clean || die "Failed to clean moses"
-make || die "Failed to compile moses"
-
-rm -rf $tempdir || die "Failed to remove tempdir $tempdir"
-echo "Moses successfully compiled"