Diffstat:
-rw-r--r--  Makefile.def                            |   10
-rw-r--r--  Makefile.in                             | 1146
-rwxr-xr-x  configure                               |    2
-rw-r--r--  configure.ac                            |    2
-rw-r--r--  gcc/Makefile.in                         |    8
-rw-r--r--  libcody/CMakeLists.txt                  |  121
-rw-r--r--  libcody/CODING.md                       |  115
-rw-r--r--  libcody/CONTRIB.md                      |   10
-rw-r--r--  libcody/LICENSE                         |  201
-rw-r--r--  libcody/LICENSE.gcc                     |   29
-rw-r--r--  libcody/Makefile.in                     |  187
-rw-r--r--  libcody/Makesub.in                      |   48
-rw-r--r--  libcody/README.md                       |  497
-rw-r--r--  libcody/buffer.cc                       |  387
-rwxr-xr-x  libcody/build-aux/config.guess          | 1476
-rwxr-xr-x  libcody/build-aux/config.sub            | 1833
-rwxr-xr-x  libcody/build-aux/install-sh            |  518
-rw-r--r--  libcody/client.cc                       |  336
-rw-r--r--  libcody/cmake/libcody-config-ix.cmake   |   43
-rw-r--r--  libcody/cody.hh                         |  800
-rw-r--r--  libcody/config.h.in                     |   29
-rw-r--r--  libcody/config.m4                       |  280
-rwxr-xr-x  libcody/configure                       | 4201
-rw-r--r--  libcody/configure.ac                    |   37
-rw-r--r--  libcody/dox.cfg.in                      | 2478
-rw-r--r--  libcody/fatal.cc                        |   78
-rw-r--r--  libcody/gdbinit.in                      |   11
-rw-r--r--  libcody/internal.hh                     |  125
-rw-r--r--  libcody/netclient.cc                    |  140
-rw-r--r--  libcody/netserver.cc                    |  153
-rw-r--r--  libcody/packet.cc                       |   50
-rw-r--r--  libcody/resolver.cc                     |  209
-rw-r--r--  libcody/server.cc                       |  306
-rw-r--r--  libcody/tests/01-serialize/connect.cc   |   30
-rw-r--r--  libcody/tests/01-serialize/decoder.cc   |   73
-rw-r--r--  libcody/tests/01-serialize/encoder.cc   |   48
-rw-r--r--  libcody/tests/02-comms/client-1.cc      |   97
-rw-r--r--  libcody/tests/02-comms/pivot-1.cc       |   76
-rw-r--r--  libcody/tests/02-comms/server-1.cc      |   68
-rw-r--r--  libcody/tests/Makesub.in                |   36
-rwxr-xr-x  libcody/tests/jouster                   |   11
41 files changed, 16301 insertions, 4 deletions
diff --git a/Makefile.def b/Makefile.def
index 36fd26b..3bb3a81 100644
--- a/Makefile.def
+++ b/Makefile.def
@@ -81,6 +81,15 @@ host_modules= { module= itcl; };
host_modules= { module= ld; bootstrap=true; };
host_modules= { module= libbacktrace; bootstrap=true; };
host_modules= { module= libcpp; bootstrap=true; };
+// As with libiconv, don't install any of libcody
+host_modules= { module= libcody; bootstrap=true;
+ no_install= true;
+ missing= pdf;
+ missing= html;
+ missing= info;
+ missing= install-pdf;
+ missing= install-html;
+ missing= install-info; };
host_modules= { module= libdecnumber; bootstrap=true; };
host_modules= { module= libgui; };
host_modules= { module= libiberty; bootstrap=true;
@@ -347,6 +356,7 @@ dependencies = { module=all-gcc; on=all-build-libcpp; };
dependencies = { module=all-gcc; on=all-zlib; };
dependencies = { module=all-gcc; on=all-libbacktrace; hard=true; };
dependencies = { module=all-gcc; on=all-libcpp; hard=true; };
+dependencies = { module=all-gcc; on=all-libcody; hard=true; };
dependencies = { module=all-gcc; on=all-libdecnumber; hard=true; };
dependencies = { module=all-gcc; on=all-libiberty; };
dependencies = { module=all-gcc; on=all-fixincludes; };
diff --git a/Makefile.in b/Makefile.in
index 36e369d..d9d2d1d 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -1017,6 +1017,7 @@ configure-host: \
maybe-configure-ld \
maybe-configure-libbacktrace \
maybe-configure-libcpp \
+ maybe-configure-libcody \
maybe-configure-libdecnumber \
maybe-configure-libgui \
maybe-configure-libiberty \
@@ -1163,6 +1164,9 @@ all-host: maybe-all-libbacktrace
@if libcpp-no-bootstrap
all-host: maybe-all-libcpp
@endif libcpp-no-bootstrap
+@if libcody-no-bootstrap
+all-host: maybe-all-libcody
+@endif libcody-no-bootstrap
@if libdecnumber-no-bootstrap
all-host: maybe-all-libdecnumber
@endif libdecnumber-no-bootstrap
@@ -1281,6 +1285,7 @@ info-host: maybe-info-itcl
info-host: maybe-info-ld
info-host: maybe-info-libbacktrace
info-host: maybe-info-libcpp
+info-host: maybe-info-libcody
info-host: maybe-info-libdecnumber
info-host: maybe-info-libgui
info-host: maybe-info-libiberty
@@ -1370,6 +1375,7 @@ dvi-host: maybe-dvi-itcl
dvi-host: maybe-dvi-ld
dvi-host: maybe-dvi-libbacktrace
dvi-host: maybe-dvi-libcpp
+dvi-host: maybe-dvi-libcody
dvi-host: maybe-dvi-libdecnumber
dvi-host: maybe-dvi-libgui
dvi-host: maybe-dvi-libiberty
@@ -1459,6 +1465,7 @@ pdf-host: maybe-pdf-itcl
pdf-host: maybe-pdf-ld
pdf-host: maybe-pdf-libbacktrace
pdf-host: maybe-pdf-libcpp
+pdf-host: maybe-pdf-libcody
pdf-host: maybe-pdf-libdecnumber
pdf-host: maybe-pdf-libgui
pdf-host: maybe-pdf-libiberty
@@ -1548,6 +1555,7 @@ html-host: maybe-html-itcl
html-host: maybe-html-ld
html-host: maybe-html-libbacktrace
html-host: maybe-html-libcpp
+html-host: maybe-html-libcody
html-host: maybe-html-libdecnumber
html-host: maybe-html-libgui
html-host: maybe-html-libiberty
@@ -1637,6 +1645,7 @@ TAGS-host: maybe-TAGS-itcl
TAGS-host: maybe-TAGS-ld
TAGS-host: maybe-TAGS-libbacktrace
TAGS-host: maybe-TAGS-libcpp
+TAGS-host: maybe-TAGS-libcody
TAGS-host: maybe-TAGS-libdecnumber
TAGS-host: maybe-TAGS-libgui
TAGS-host: maybe-TAGS-libiberty
@@ -1726,6 +1735,7 @@ install-info-host: maybe-install-info-itcl
install-info-host: maybe-install-info-ld
install-info-host: maybe-install-info-libbacktrace
install-info-host: maybe-install-info-libcpp
+install-info-host: maybe-install-info-libcody
install-info-host: maybe-install-info-libdecnumber
install-info-host: maybe-install-info-libgui
install-info-host: maybe-install-info-libiberty
@@ -1815,6 +1825,7 @@ install-pdf-host: maybe-install-pdf-itcl
install-pdf-host: maybe-install-pdf-ld
install-pdf-host: maybe-install-pdf-libbacktrace
install-pdf-host: maybe-install-pdf-libcpp
+install-pdf-host: maybe-install-pdf-libcody
install-pdf-host: maybe-install-pdf-libdecnumber
install-pdf-host: maybe-install-pdf-libgui
install-pdf-host: maybe-install-pdf-libiberty
@@ -1904,6 +1915,7 @@ install-html-host: maybe-install-html-itcl
install-html-host: maybe-install-html-ld
install-html-host: maybe-install-html-libbacktrace
install-html-host: maybe-install-html-libcpp
+install-html-host: maybe-install-html-libcody
install-html-host: maybe-install-html-libdecnumber
install-html-host: maybe-install-html-libgui
install-html-host: maybe-install-html-libiberty
@@ -1993,6 +2005,7 @@ installcheck-host: maybe-installcheck-itcl
installcheck-host: maybe-installcheck-ld
installcheck-host: maybe-installcheck-libbacktrace
installcheck-host: maybe-installcheck-libcpp
+installcheck-host: maybe-installcheck-libcody
installcheck-host: maybe-installcheck-libdecnumber
installcheck-host: maybe-installcheck-libgui
installcheck-host: maybe-installcheck-libiberty
@@ -2082,6 +2095,7 @@ mostlyclean-host: maybe-mostlyclean-itcl
mostlyclean-host: maybe-mostlyclean-ld
mostlyclean-host: maybe-mostlyclean-libbacktrace
mostlyclean-host: maybe-mostlyclean-libcpp
+mostlyclean-host: maybe-mostlyclean-libcody
mostlyclean-host: maybe-mostlyclean-libdecnumber
mostlyclean-host: maybe-mostlyclean-libgui
mostlyclean-host: maybe-mostlyclean-libiberty
@@ -2171,6 +2185,7 @@ clean-host: maybe-clean-itcl
clean-host: maybe-clean-ld
clean-host: maybe-clean-libbacktrace
clean-host: maybe-clean-libcpp
+clean-host: maybe-clean-libcody
clean-host: maybe-clean-libdecnumber
clean-host: maybe-clean-libgui
clean-host: maybe-clean-libiberty
@@ -2260,6 +2275,7 @@ distclean-host: maybe-distclean-itcl
distclean-host: maybe-distclean-ld
distclean-host: maybe-distclean-libbacktrace
distclean-host: maybe-distclean-libcpp
+distclean-host: maybe-distclean-libcody
distclean-host: maybe-distclean-libdecnumber
distclean-host: maybe-distclean-libgui
distclean-host: maybe-distclean-libiberty
@@ -2349,6 +2365,7 @@ maintainer-clean-host: maybe-maintainer-clean-itcl
maintainer-clean-host: maybe-maintainer-clean-ld
maintainer-clean-host: maybe-maintainer-clean-libbacktrace
maintainer-clean-host: maybe-maintainer-clean-libcpp
+maintainer-clean-host: maybe-maintainer-clean-libcody
maintainer-clean-host: maybe-maintainer-clean-libdecnumber
maintainer-clean-host: maybe-maintainer-clean-libgui
maintainer-clean-host: maybe-maintainer-clean-libiberty
@@ -2494,6 +2511,7 @@ check-host: \
maybe-check-ld \
maybe-check-libbacktrace \
maybe-check-libcpp \
+ maybe-check-libcody \
maybe-check-libdecnumber \
maybe-check-libgui \
maybe-check-libiberty \
@@ -2630,6 +2648,7 @@ install-host-nogcc: \
maybe-install-ld \
maybe-install-libbacktrace \
maybe-install-libcpp \
+ maybe-install-libcody \
maybe-install-libdecnumber \
maybe-install-libgui \
maybe-install-libiberty \
@@ -2683,6 +2702,7 @@ install-host: \
maybe-install-ld \
maybe-install-libbacktrace \
maybe-install-libcpp \
+ maybe-install-libcody \
maybe-install-libdecnumber \
maybe-install-libgui \
maybe-install-libiberty \
@@ -2792,6 +2812,7 @@ install-strip-host: \
maybe-install-strip-ld \
maybe-install-strip-libbacktrace \
maybe-install-strip-libcpp \
+ maybe-install-strip-libcody \
maybe-install-strip-libdecnumber \
maybe-install-strip-libgui \
maybe-install-strip-libiberty \
@@ -25050,6 +25071,1015 @@ maintainer-clean-libcpp:
+.PHONY: configure-libcody maybe-configure-libcody
+maybe-configure-libcody:
+@if gcc-bootstrap
+configure-libcody: stage_current
+@endif gcc-bootstrap
+@if libcody
+maybe-configure-libcody: configure-libcody
+configure-libcody:
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ $(HOST_EXPORTS) \
+ echo Configuring in $(HOST_SUBDIR)/libcody; \
+ cd "$(HOST_SUBDIR)/libcody" || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) \
+ $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ || exit 1
+@endif libcody
+
+
+
+.PHONY: configure-stage1-libcody maybe-configure-stage1-libcody
+maybe-configure-stage1-libcody:
+@if libcody-bootstrap
+maybe-configure-stage1-libcody: configure-stage1-libcody
+configure-stage1-libcody:
+ @[ $(current_stage) = stage1 ] || $(MAKE) stage1-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE1_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ CFLAGS="$(STAGE1_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGE1_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(LIBCFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage 1 in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ \
+ $(STAGE1_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stage2-libcody maybe-configure-stage2-libcody
+maybe-configure-stage2-libcody:
+@if libcody-bootstrap
+maybe-configure-stage2-libcody: configure-stage2-libcody
+configure-stage2-libcody:
+ @[ $(current_stage) = stage2 ] || $(MAKE) stage2-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE2_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGE2_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGE2_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGE2_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage 2 in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGE2_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stage3-libcody maybe-configure-stage3-libcody
+maybe-configure-stage3-libcody:
+@if libcody-bootstrap
+maybe-configure-stage3-libcody: configure-stage3-libcody
+configure-stage3-libcody:
+ @[ $(current_stage) = stage3 ] || $(MAKE) stage3-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE3_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGE3_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGE3_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGE3_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage 3 in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGE3_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stage4-libcody maybe-configure-stage4-libcody
+maybe-configure-stage4-libcody:
+@if libcody-bootstrap
+maybe-configure-stage4-libcody: configure-stage4-libcody
+configure-stage4-libcody:
+ @[ $(current_stage) = stage4 ] || $(MAKE) stage4-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE4_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGE4_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGE4_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGE4_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage 4 in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGE4_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stageprofile-libcody maybe-configure-stageprofile-libcody
+maybe-configure-stageprofile-libcody:
+@if libcody-bootstrap
+maybe-configure-stageprofile-libcody: configure-stageprofile-libcody
+configure-stageprofile-libcody:
+ @[ $(current_stage) = stageprofile ] || $(MAKE) stageprofile-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEprofile_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGEprofile_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGEprofile_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGEprofile_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage profile in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGEprofile_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stagetrain-libcody maybe-configure-stagetrain-libcody
+maybe-configure-stagetrain-libcody:
+@if libcody-bootstrap
+maybe-configure-stagetrain-libcody: configure-stagetrain-libcody
+configure-stagetrain-libcody:
+ @[ $(current_stage) = stagetrain ] || $(MAKE) stagetrain-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEtrain_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGEtrain_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGEtrain_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGEtrain_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage train in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGEtrain_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stagefeedback-libcody maybe-configure-stagefeedback-libcody
+maybe-configure-stagefeedback-libcody:
+@if libcody-bootstrap
+maybe-configure-stagefeedback-libcody: configure-stagefeedback-libcody
+configure-stagefeedback-libcody:
+ @[ $(current_stage) = stagefeedback ] || $(MAKE) stagefeedback-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEfeedback_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGEfeedback_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGEfeedback_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGEfeedback_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage feedback in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGEfeedback_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stageautoprofile-libcody maybe-configure-stageautoprofile-libcody
+maybe-configure-stageautoprofile-libcody:
+@if libcody-bootstrap
+maybe-configure-stageautoprofile-libcody: configure-stageautoprofile-libcody
+configure-stageautoprofile-libcody:
+ @[ $(current_stage) = stageautoprofile ] || $(MAKE) stageautoprofile-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEautoprofile_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGEautoprofile_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGEautoprofile_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGEautoprofile_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage autoprofile in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGEautoprofile_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+.PHONY: configure-stageautofeedback-libcody maybe-configure-stageautofeedback-libcody
+maybe-configure-stageautofeedback-libcody:
+@if libcody-bootstrap
+maybe-configure-stageautofeedback-libcody: configure-stageautofeedback-libcody
+configure-stageautofeedback-libcody:
+ @[ $(current_stage) = stageautofeedback ] || $(MAKE) stageautofeedback-start
+ @$(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEautofeedback_TFLAGS)"; \
+ test ! -f $(HOST_SUBDIR)/libcody/Makefile || exit 0; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ CFLAGS="$(STAGEautofeedback_CFLAGS)"; export CFLAGS; \
+ CXXFLAGS="$(STAGEautofeedback_CXXFLAGS)"; export CXXFLAGS; \
+ LIBCFLAGS="$(STAGEautofeedback_CFLAGS)"; export LIBCFLAGS; \
+ echo Configuring stage autofeedback in $(HOST_SUBDIR)/libcody; \
+ $(SHELL) $(srcdir)/mkinstalldirs $(HOST_SUBDIR)/libcody; \
+ cd $(HOST_SUBDIR)/libcody || exit 1; \
+ case $(srcdir) in \
+ /* | [A-Za-z]:[\\/]*) topdir=$(srcdir) ;; \
+ *) topdir=`echo $(HOST_SUBDIR)/libcody/ | \
+ sed -e 's,\./,,g' -e 's,[^/]*/,../,g' `$(srcdir) ;; \
+ esac; \
+ module_srcdir=libcody; \
+ $(SHELL) $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+ --target=${target_alias} \
+ --with-build-libsubdir=$(HOST_SUBDIR) \
+ $(STAGEautofeedback_CONFIGURE_FLAGS)
+@endif libcody-bootstrap
+
+
+
+
+
+.PHONY: all-libcody maybe-all-libcody
+maybe-all-libcody:
+@if gcc-bootstrap
+all-libcody: stage_current
+@endif gcc-bootstrap
+@if libcody
+TARGET-libcody=all
+maybe-all-libcody: all-libcody
+all-libcody: configure-libcody
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) $(EXTRA_HOST_FLAGS) $(STAGE1_FLAGS_TO_PASS) \
+ $(TARGET-libcody))
+@endif libcody
+
+
+
+.PHONY: all-stage1-libcody maybe-all-stage1-libcody
+.PHONY: clean-stage1-libcody maybe-clean-stage1-libcody
+maybe-all-stage1-libcody:
+maybe-clean-stage1-libcody:
+@if libcody-bootstrap
+maybe-all-stage1-libcody: all-stage1-libcody
+all-stage1: all-stage1-libcody
+TARGET-stage1-libcody = $(TARGET-libcody)
+all-stage1-libcody: configure-stage1-libcody
+ @[ $(current_stage) = stage1 ] || $(MAKE) stage1-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE1_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGE1_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGE1_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGE1_CXXFLAGS)" \
+ LIBCFLAGS="$(LIBCFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) \
+ $(STAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGE1_TFLAGS)" \
+ $(TARGET-stage1-libcody)
+
+maybe-clean-stage1-libcody: clean-stage1-libcody
+clean-stage1: clean-stage1-libcody
+clean-stage1-libcody:
+ @if [ $(current_stage) = stage1 ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stage1-libcody/Makefile ] || exit 0; \
+ $(MAKE) stage1-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) \
+ $(STAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stage2-libcody maybe-all-stage2-libcody
+.PHONY: clean-stage2-libcody maybe-clean-stage2-libcody
+maybe-all-stage2-libcody:
+maybe-clean-stage2-libcody:
+@if libcody-bootstrap
+maybe-all-stage2-libcody: all-stage2-libcody
+all-stage2: all-stage2-libcody
+TARGET-stage2-libcody = $(TARGET-libcody)
+all-stage2-libcody: configure-stage2-libcody
+ @[ $(current_stage) = stage2 ] || $(MAKE) stage2-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE2_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGE2_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGE2_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGE2_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGE2_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGE2_TFLAGS)" \
+ $(TARGET-stage2-libcody)
+
+maybe-clean-stage2-libcody: clean-stage2-libcody
+clean-stage2: clean-stage2-libcody
+clean-stage2-libcody:
+ @if [ $(current_stage) = stage2 ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stage2-libcody/Makefile ] || exit 0; \
+ $(MAKE) stage2-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stage3-libcody maybe-all-stage3-libcody
+.PHONY: clean-stage3-libcody maybe-clean-stage3-libcody
+maybe-all-stage3-libcody:
+maybe-clean-stage3-libcody:
+@if libcody-bootstrap
+maybe-all-stage3-libcody: all-stage3-libcody
+all-stage3: all-stage3-libcody
+TARGET-stage3-libcody = $(TARGET-libcody)
+all-stage3-libcody: configure-stage3-libcody
+ @[ $(current_stage) = stage3 ] || $(MAKE) stage3-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE3_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGE3_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGE3_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGE3_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGE3_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGE3_TFLAGS)" \
+ $(TARGET-stage3-libcody)
+
+maybe-clean-stage3-libcody: clean-stage3-libcody
+clean-stage3: clean-stage3-libcody
+clean-stage3-libcody:
+ @if [ $(current_stage) = stage3 ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stage3-libcody/Makefile ] || exit 0; \
+ $(MAKE) stage3-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stage4-libcody maybe-all-stage4-libcody
+.PHONY: clean-stage4-libcody maybe-clean-stage4-libcody
+maybe-all-stage4-libcody:
+maybe-clean-stage4-libcody:
+@if libcody-bootstrap
+maybe-all-stage4-libcody: all-stage4-libcody
+all-stage4: all-stage4-libcody
+TARGET-stage4-libcody = $(TARGET-libcody)
+all-stage4-libcody: configure-stage4-libcody
+ @[ $(current_stage) = stage4 ] || $(MAKE) stage4-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGE4_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGE4_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGE4_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGE4_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGE4_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGE4_TFLAGS)" \
+ $(TARGET-stage4-libcody)
+
+maybe-clean-stage4-libcody: clean-stage4-libcody
+clean-stage4: clean-stage4-libcody
+clean-stage4-libcody:
+ @if [ $(current_stage) = stage4 ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stage4-libcody/Makefile ] || exit 0; \
+ $(MAKE) stage4-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stageprofile-libcody maybe-all-stageprofile-libcody
+.PHONY: clean-stageprofile-libcody maybe-clean-stageprofile-libcody
+maybe-all-stageprofile-libcody:
+maybe-clean-stageprofile-libcody:
+@if libcody-bootstrap
+maybe-all-stageprofile-libcody: all-stageprofile-libcody
+all-stageprofile: all-stageprofile-libcody
+TARGET-stageprofile-libcody = $(TARGET-libcody)
+all-stageprofile-libcody: configure-stageprofile-libcody
+ @[ $(current_stage) = stageprofile ] || $(MAKE) stageprofile-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEprofile_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGEprofile_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGEprofile_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGEprofile_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGEprofile_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGEprofile_TFLAGS)" \
+ $(TARGET-stageprofile-libcody)
+
+maybe-clean-stageprofile-libcody: clean-stageprofile-libcody
+clean-stageprofile: clean-stageprofile-libcody
+clean-stageprofile-libcody:
+ @if [ $(current_stage) = stageprofile ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stageprofile-libcody/Makefile ] || exit 0; \
+ $(MAKE) stageprofile-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stagetrain-libcody maybe-all-stagetrain-libcody
+.PHONY: clean-stagetrain-libcody maybe-clean-stagetrain-libcody
+maybe-all-stagetrain-libcody:
+maybe-clean-stagetrain-libcody:
+@if libcody-bootstrap
+maybe-all-stagetrain-libcody: all-stagetrain-libcody
+all-stagetrain: all-stagetrain-libcody
+TARGET-stagetrain-libcody = $(TARGET-libcody)
+all-stagetrain-libcody: configure-stagetrain-libcody
+ @[ $(current_stage) = stagetrain ] || $(MAKE) stagetrain-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEtrain_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGEtrain_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGEtrain_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGEtrain_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGEtrain_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGEtrain_TFLAGS)" \
+ $(TARGET-stagetrain-libcody)
+
+maybe-clean-stagetrain-libcody: clean-stagetrain-libcody
+clean-stagetrain: clean-stagetrain-libcody
+clean-stagetrain-libcody:
+ @if [ $(current_stage) = stagetrain ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stagetrain-libcody/Makefile ] || exit 0; \
+ $(MAKE) stagetrain-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stagefeedback-libcody maybe-all-stagefeedback-libcody
+.PHONY: clean-stagefeedback-libcody maybe-clean-stagefeedback-libcody
+maybe-all-stagefeedback-libcody:
+maybe-clean-stagefeedback-libcody:
+@if libcody-bootstrap
+maybe-all-stagefeedback-libcody: all-stagefeedback-libcody
+all-stagefeedback: all-stagefeedback-libcody
+TARGET-stagefeedback-libcody = $(TARGET-libcody)
+all-stagefeedback-libcody: configure-stagefeedback-libcody
+ @[ $(current_stage) = stagefeedback ] || $(MAKE) stagefeedback-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEfeedback_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGEfeedback_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGEfeedback_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGEfeedback_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGEfeedback_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGEfeedback_TFLAGS)" \
+ $(TARGET-stagefeedback-libcody)
+
+maybe-clean-stagefeedback-libcody: clean-stagefeedback-libcody
+clean-stagefeedback: clean-stagefeedback-libcody
+clean-stagefeedback-libcody:
+ @if [ $(current_stage) = stagefeedback ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stagefeedback-libcody/Makefile ] || exit 0; \
+ $(MAKE) stagefeedback-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stageautoprofile-libcody maybe-all-stageautoprofile-libcody
+.PHONY: clean-stageautoprofile-libcody maybe-clean-stageautoprofile-libcody
+maybe-all-stageautoprofile-libcody:
+maybe-clean-stageautoprofile-libcody:
+@if libcody-bootstrap
+maybe-all-stageautoprofile-libcody: all-stageautoprofile-libcody
+all-stageautoprofile: all-stageautoprofile-libcody
+TARGET-stageautoprofile-libcody = $(TARGET-libcody)
+all-stageautoprofile-libcody: configure-stageautoprofile-libcody
+ @[ $(current_stage) = stageautoprofile ] || $(MAKE) stageautoprofile-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEautoprofile_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ $$s/gcc/config/i386/$(AUTO_PROFILE) \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGEautoprofile_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGEautoprofile_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGEautoprofile_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGEautoprofile_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGEautoprofile_TFLAGS)" \
+ $(TARGET-stageautoprofile-libcody)
+
+maybe-clean-stageautoprofile-libcody: clean-stageautoprofile-libcody
+clean-stageautoprofile: clean-stageautoprofile-libcody
+clean-stageautoprofile-libcody:
+ @if [ $(current_stage) = stageautoprofile ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stageautoprofile-libcody/Makefile ] || exit 0; \
+ $(MAKE) stageautoprofile-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+.PHONY: all-stageautofeedback-libcody maybe-all-stageautofeedback-libcody
+.PHONY: clean-stageautofeedback-libcody maybe-clean-stageautofeedback-libcody
+maybe-all-stageautofeedback-libcody:
+maybe-clean-stageautofeedback-libcody:
+@if libcody-bootstrap
+maybe-all-stageautofeedback-libcody: all-stageautofeedback-libcody
+all-stageautofeedback: all-stageautofeedback-libcody
+TARGET-stageautofeedback-libcody = $(TARGET-libcody)
+all-stageautofeedback-libcody: configure-stageautofeedback-libcody
+ @[ $(current_stage) = stageautofeedback ] || $(MAKE) stageautofeedback-start
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ TFLAGS="$(STAGEautofeedback_TFLAGS)"; \
+ $(HOST_EXPORTS) \
+ $(POSTSTAGE1_HOST_EXPORTS) \
+ cd $(HOST_SUBDIR)/libcody && \
+ \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) \
+ CFLAGS="$(STAGEautofeedback_CFLAGS)" \
+ GENERATOR_CFLAGS="$(STAGEautofeedback_GENERATOR_CFLAGS)" \
+ CXXFLAGS="$(STAGEautofeedback_CXXFLAGS)" \
+ LIBCFLAGS="$(STAGEautofeedback_CFLAGS)" \
+ CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \
+ CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \
+ LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \
+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) \
+ TFLAGS="$(STAGEautofeedback_TFLAGS)" PERF_DATA=perf.data \
+ $(TARGET-stageautofeedback-libcody)
+
+maybe-clean-stageautofeedback-libcody: clean-stageautofeedback-libcody
+clean-stageautofeedback: clean-stageautofeedback-libcody
+clean-stageautofeedback-libcody:
+ @if [ $(current_stage) = stageautofeedback ]; then \
+ [ -f $(HOST_SUBDIR)/libcody/Makefile ] || exit 0; \
+ else \
+ [ -f $(HOST_SUBDIR)/stageautofeedback-libcody/Makefile ] || exit 0; \
+ $(MAKE) stageautofeedback-start; \
+ fi; \
+ cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) clean
+@endif libcody-bootstrap
+
+
+
+
+
+.PHONY: check-libcody maybe-check-libcody
+maybe-check-libcody:
+@if libcody
+maybe-check-libcody: check-libcody
+
+check-libcody:
+ @: $(MAKE); $(unstage)
+ @r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) $(EXTRA_HOST_EXPORTS) \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(FLAGS_TO_PASS) $(EXTRA_BOOTSTRAP_FLAGS) check)
+
+@endif libcody
+
+.PHONY: install-libcody maybe-install-libcody
+maybe-install-libcody:
+@if libcody
+maybe-install-libcody: install-libcody
+
+install-libcody:
+
+@endif libcody
+
+.PHONY: install-strip-libcody maybe-install-strip-libcody
+maybe-install-strip-libcody:
+@if libcody
+maybe-install-strip-libcody: install-strip-libcody
+
+install-strip-libcody:
+
+@endif libcody
+
+# Other targets (info, dvi, pdf, etc.)
+
+.PHONY: maybe-info-libcody info-libcody
+maybe-info-libcody:
+@if libcody
+maybe-info-libcody: info-libcody
+
+# libcody doesn't support info.
+info-libcody:
+
+@endif libcody
+
+.PHONY: maybe-dvi-libcody dvi-libcody
+maybe-dvi-libcody:
+@if libcody
+maybe-dvi-libcody: dvi-libcody
+
+dvi-libcody: \
+ configure-libcody
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing dvi in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ dvi) \
+ || exit 1
+
+@endif libcody
+
+.PHONY: maybe-pdf-libcody pdf-libcody
+maybe-pdf-libcody:
+@if libcody
+maybe-pdf-libcody: pdf-libcody
+
+# libcody doesn't support pdf.
+pdf-libcody:
+
+@endif libcody
+
+.PHONY: maybe-html-libcody html-libcody
+maybe-html-libcody:
+@if libcody
+maybe-html-libcody: html-libcody
+
+# libcody doesn't support html.
+html-libcody:
+
+@endif libcody
+
+.PHONY: maybe-TAGS-libcody TAGS-libcody
+maybe-TAGS-libcody:
+@if libcody
+maybe-TAGS-libcody: TAGS-libcody
+
+TAGS-libcody: \
+ configure-libcody
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing TAGS in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ TAGS) \
+ || exit 1
+
+@endif libcody
+
+.PHONY: maybe-install-info-libcody install-info-libcody
+maybe-install-info-libcody:
+@if libcody
+maybe-install-info-libcody: install-info-libcody
+
+# libcody doesn't support install-info.
+install-info-libcody:
+
+@endif libcody
+
+.PHONY: maybe-install-pdf-libcody install-pdf-libcody
+maybe-install-pdf-libcody:
+@if libcody
+maybe-install-pdf-libcody: install-pdf-libcody
+
+# libcody doesn't support install-pdf.
+install-pdf-libcody:
+
+@endif libcody
+
+.PHONY: maybe-install-html-libcody install-html-libcody
+maybe-install-html-libcody:
+@if libcody
+maybe-install-html-libcody: install-html-libcody
+
+# libcody doesn't support install-html.
+install-html-libcody:
+
+@endif libcody
+
+.PHONY: maybe-installcheck-libcody installcheck-libcody
+maybe-installcheck-libcody:
+@if libcody
+maybe-installcheck-libcody: installcheck-libcody
+
+installcheck-libcody: \
+ configure-libcody
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing installcheck in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ installcheck) \
+ || exit 1
+
+@endif libcody
+
+.PHONY: maybe-mostlyclean-libcody mostlyclean-libcody
+maybe-mostlyclean-libcody:
+@if libcody
+maybe-mostlyclean-libcody: mostlyclean-libcody
+
+mostlyclean-libcody:
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing mostlyclean in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ mostlyclean) \
+ || exit 1
+
+@endif libcody
+
+.PHONY: maybe-clean-libcody clean-libcody
+maybe-clean-libcody:
+@if libcody
+maybe-clean-libcody: clean-libcody
+
+clean-libcody:
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing clean in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ clean) \
+ || exit 1
+
+@endif libcody
+
+.PHONY: maybe-distclean-libcody distclean-libcody
+maybe-distclean-libcody:
+@if libcody
+maybe-distclean-libcody: distclean-libcody
+
+distclean-libcody:
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing distclean in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ distclean) \
+ || exit 1
+
+@endif libcody
+
+.PHONY: maybe-maintainer-clean-libcody maintainer-clean-libcody
+maybe-maintainer-clean-libcody:
+@if libcody
+maybe-maintainer-clean-libcody: maintainer-clean-libcody
+
+maintainer-clean-libcody:
+ @[ -f ./libcody/Makefile ] || exit 0; \
+ r=`${PWD_COMMAND}`; export r; \
+ s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \
+ $(HOST_EXPORTS) \
+ for flag in $(EXTRA_HOST_FLAGS) ; do \
+ eval `echo "$$flag" | sed -e "s|^\([^=]*\)=\(.*\)|\1='\2'; export \1|"`; \
+ done; \
+ echo "Doing maintainer-clean in libcody"; \
+ (cd $(HOST_SUBDIR)/libcody && \
+ $(MAKE) $(BASE_FLAGS_TO_PASS) "AR=$${AR}" "AS=$${AS}" \
+ "CC=$${CC}" "CXX=$${CXX}" "LD=$${LD}" "NM=$${NM}" \
+ "RANLIB=$${RANLIB}" \
+ "DLLTOOL=$${DLLTOOL}" "WINDRES=$${WINDRES}" "WINDMC=$${WINDMC}" \
+ maintainer-clean) \
+ || exit 1
+
+@endif libcody
+
+
+
.PHONY: configure-libdecnumber maybe-configure-libdecnumber
maybe-configure-libdecnumber:
@if gcc-bootstrap
@@ -55705,6 +56735,11 @@ stage1-start::
mkdir stage1-libcpp; \
mv stage1-libcpp libcpp
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stage1-libcody ] || \
+ mkdir stage1-libcody; \
+ mv stage1-libcody libcody
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stage1-libdecnumber ] || \
mkdir stage1-libdecnumber; \
@@ -55825,6 +56860,11 @@ stage1-end::
cd $(HOST_SUBDIR); mv libcpp stage1-libcpp; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stage1-libcody; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stage1-libdecnumber; \
@@ -56004,6 +57044,12 @@ stage2-start::
mv stage2-libcpp libcpp; \
mv stage1-libcpp prev-libcpp || test -f stage1-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stage2-libcody ] || \
+ mkdir stage2-libcody; \
+ mv stage2-libcody libcody; \
+ mv stage1-libcody prev-libcody || test -f stage1-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stage2-libdecnumber ] || \
mkdir stage2-libdecnumber; \
@@ -56148,6 +57194,12 @@ stage2-end::
mv prev-libcpp stage1-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stage2-libcody; \
+ mv prev-libcody stage1-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stage2-libdecnumber; \
@@ -56358,6 +57410,12 @@ stage3-start::
mv stage3-libcpp libcpp; \
mv stage2-libcpp prev-libcpp || test -f stage2-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stage3-libcody ] || \
+ mkdir stage3-libcody; \
+ mv stage3-libcody libcody; \
+ mv stage2-libcody prev-libcody || test -f stage2-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stage3-libdecnumber ] || \
mkdir stage3-libdecnumber; \
@@ -56502,6 +57560,12 @@ stage3-end::
mv prev-libcpp stage2-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stage3-libcody; \
+ mv prev-libcody stage2-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stage3-libdecnumber; \
@@ -56768,6 +57832,12 @@ stage4-start::
mv stage4-libcpp libcpp; \
mv stage3-libcpp prev-libcpp || test -f stage3-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stage4-libcody ] || \
+ mkdir stage4-libcody; \
+ mv stage4-libcody libcody; \
+ mv stage3-libcody prev-libcody || test -f stage3-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stage4-libdecnumber ] || \
mkdir stage4-libdecnumber; \
@@ -56912,6 +57982,12 @@ stage4-end::
mv prev-libcpp stage3-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stage4-libcody; \
+ mv prev-libcody stage3-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stage4-libdecnumber; \
@@ -57166,6 +58242,12 @@ stageprofile-start::
mv stageprofile-libcpp libcpp; \
mv stage1-libcpp prev-libcpp || test -f stage1-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stageprofile-libcody ] || \
+ mkdir stageprofile-libcody; \
+ mv stageprofile-libcody libcody; \
+ mv stage1-libcody prev-libcody || test -f stage1-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stageprofile-libdecnumber ] || \
mkdir stageprofile-libdecnumber; \
@@ -57310,6 +58392,12 @@ stageprofile-end::
mv prev-libcpp stage1-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stageprofile-libcody; \
+ mv prev-libcody stage1-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stageprofile-libdecnumber; \
@@ -57497,6 +58585,12 @@ stagetrain-start::
mv stagetrain-libcpp libcpp; \
mv stageprofile-libcpp prev-libcpp || test -f stageprofile-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stagetrain-libcody ] || \
+ mkdir stagetrain-libcody; \
+ mv stagetrain-libcody libcody; \
+ mv stageprofile-libcody prev-libcody || test -f stageprofile-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stagetrain-libdecnumber ] || \
mkdir stagetrain-libdecnumber; \
@@ -57641,6 +58735,12 @@ stagetrain-end::
mv prev-libcpp stageprofile-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stagetrain-libcody; \
+ mv prev-libcody stageprofile-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stagetrain-libdecnumber; \
@@ -57828,6 +58928,12 @@ stagefeedback-start::
mv stagefeedback-libcpp libcpp; \
mv stagetrain-libcpp prev-libcpp || test -f stagetrain-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stagefeedback-libcody ] || \
+ mkdir stagefeedback-libcody; \
+ mv stagefeedback-libcody libcody; \
+ mv stagetrain-libcody prev-libcody || test -f stagetrain-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stagefeedback-libdecnumber ] || \
mkdir stagefeedback-libdecnumber; \
@@ -57972,6 +59078,12 @@ stagefeedback-end::
mv prev-libcpp stagetrain-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stagefeedback-libcody; \
+ mv prev-libcody stagetrain-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stagefeedback-libdecnumber; \
@@ -58182,6 +59294,12 @@ stageautoprofile-start::
mv stageautoprofile-libcpp libcpp; \
mv stage1-libcpp prev-libcpp || test -f stage1-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stageautoprofile-libcody ] || \
+ mkdir stageautoprofile-libcody; \
+ mv stageautoprofile-libcody libcody; \
+ mv stage1-libcody prev-libcody || test -f stage1-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stageautoprofile-libdecnumber ] || \
mkdir stageautoprofile-libdecnumber; \
@@ -58326,6 +59444,12 @@ stageautoprofile-end::
mv prev-libcpp stage1-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stageautoprofile-libcody; \
+ mv prev-libcody stage1-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stageautoprofile-libdecnumber; \
@@ -58513,6 +59637,12 @@ stageautofeedback-start::
mv stageautofeedback-libcpp libcpp; \
mv stageautoprofile-libcpp prev-libcpp || test -f stageautoprofile-lean
@endif libcpp
+@if libcody
+ @cd $(HOST_SUBDIR); [ -d stageautofeedback-libcody ] || \
+ mkdir stageautofeedback-libcody; \
+ mv stageautofeedback-libcody libcody; \
+ mv stageautoprofile-libcody prev-libcody || test -f stageautoprofile-lean
+@endif libcody
@if libdecnumber
@cd $(HOST_SUBDIR); [ -d stageautofeedback-libdecnumber ] || \
mkdir stageautofeedback-libdecnumber; \
@@ -58657,6 +59787,12 @@ stageautofeedback-end::
mv prev-libcpp stageautoprofile-libcpp; : ; \
fi
@endif libcpp
+@if libcody
+ @if test -d $(HOST_SUBDIR)/libcody; then \
+ cd $(HOST_SUBDIR); mv libcody stageautofeedback-libcody; \
+ mv prev-libcody stageautoprofile-libcody; : ; \
+ fi
+@endif libcody
@if libdecnumber
@if test -d $(HOST_SUBDIR)/libdecnumber; then \
cd $(HOST_SUBDIR); mv libdecnumber stageautofeedback-libdecnumber; \
@@ -59185,6 +60321,16 @@ all-stagetrain-gcc: all-stagetrain-libcpp
all-stagefeedback-gcc: all-stagefeedback-libcpp
all-stageautoprofile-gcc: all-stageautoprofile-libcpp
all-stageautofeedback-gcc: all-stageautofeedback-libcpp
+all-gcc: all-libcody
+all-stage1-gcc: all-stage1-libcody
+all-stage2-gcc: all-stage2-libcody
+all-stage3-gcc: all-stage3-libcody
+all-stage4-gcc: all-stage4-libcody
+all-stageprofile-gcc: all-stageprofile-libcody
+all-stagetrain-gcc: all-stagetrain-libcody
+all-stagefeedback-gcc: all-stagefeedback-libcody
+all-stageautoprofile-gcc: all-stageautoprofile-libcody
+all-stageautofeedback-gcc: all-stageautofeedback-libcody
all-gcc: all-libdecnumber
all-stage1-gcc: all-stage1-libdecnumber
all-stage2-gcc: all-stage2-libdecnumber
diff --git a/configure b/configure
index d079883..8f4302e 100755
--- a/configure
+++ b/configure
@@ -2787,7 +2787,7 @@ build_tools="build-texinfo build-flex build-bison build-m4 build-fixincludes"
# these libraries are used by various programs built for the host environment
#f
-host_libs="intl libiberty opcodes bfd readline tcl tk itcl libgui zlib libbacktrace libcpp libdecnumber gmp mpfr mpc isl libelf libiconv libctf"
+host_libs="intl libiberty opcodes bfd readline tcl tk itcl libgui zlib libbacktrace libcpp libcody libdecnumber gmp mpfr mpc isl libelf libiconv libctf"
# these tools are built for the host environment
# Note, the powerpc-eabi build depends on sim occurring before gdb in order to
diff --git a/configure.ac b/configure.ac
index 12f85cc..56fdbf4 100644
--- a/configure.ac
+++ b/configure.ac
@@ -132,7 +132,7 @@ build_tools="build-texinfo build-flex build-bison build-m4 build-fixincludes"
# these libraries are used by various programs built for the host environment
#f
-host_libs="intl libiberty opcodes bfd readline tcl tk itcl libgui zlib libbacktrace libcpp libdecnumber gmp mpfr mpc isl libelf libiconv libctf"
+host_libs="intl libiberty opcodes bfd readline tcl tk itcl libgui zlib libbacktrace libcpp libcody libdecnumber gmp mpfr mpc isl libelf libiconv libctf"
# these tools are built for the host environment
# Note, the powerpc-eabi build depends on sim occurring before gdb in order to
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index a6f995a..0d6084c 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -412,6 +412,9 @@ enable_as_accelerator = @enable_as_accelerator@
CPPLIB = ../libcpp/libcpp.a
CPPINC = -I$(srcdir)/../libcpp/include
+CODYLIB = ../libcody/libcody.a
+CODYINC = -I$(srcdir)/../libcody
+
# Where to find decNumber
enable_decimal_float = @enable_decimal_float@
DECNUM = $(srcdir)/../libdecnumber
@@ -982,6 +985,7 @@ SYSTEM_H = system.h hwint.h $(srcdir)/../include/libiberty.h \
PREDICT_H = predict.h predict.def
CPPLIB_H = $(srcdir)/../libcpp/include/line-map.h \
$(srcdir)/../libcpp/include/cpplib.h
+CODYLIB_H = $(srcdir)/../libcody/cody.hh
INPUT_H = $(srcdir)/../libcpp/include/line-map.h input.h
OPTS_H = $(INPUT_H) $(VEC_H) opts.h $(OBSTACK_H)
SYMTAB_H = $(srcdir)/../libcpp/include/symtab.h $(OBSTACK_H)
@@ -1102,7 +1106,7 @@ BUILD_ERRORS = build/errors.o
# libintl.h will be found in ../intl if we are using the included libintl.
INCLUDES = -I. -I$(@D) -I$(srcdir) -I$(srcdir)/$(@D) \
-I$(srcdir)/../include @INCINTL@ \
- $(CPPINC) $(GMPINC) $(DECNUMINC) $(BACKTRACEINC) \
+ $(CPPINC) $(CODYINC) $(GMPINC) $(DECNUMINC) $(BACKTRACEINC) \
$(ISLINC)
COMPILE.base = $(COMPILER) -c $(ALL_COMPILERFLAGS) $(ALL_CPPFLAGS) -o $@
@@ -1714,7 +1718,7 @@ endif
ALL_HOST_OBJS = $(ALL_HOST_FRONTEND_OBJS) $(ALL_HOST_BACKEND_OBJS)
BACKEND = libbackend.a main.o libcommon-target.a libcommon.a \
- $(CPPLIB) $(LIBDECNUMBER)
+ $(CPPLIB) $(CODYLIB) $(LIBDECNUMBER)
# This is defined to "yes" if Tree checking is enabled, which roughly means
# front-end checking.
diff --git a/libcody/CMakeLists.txt b/libcody/CMakeLists.txt
new file mode 100644
index 0000000..72e59f9
--- /dev/null
+++ b/libcody/CMakeLists.txt
@@ -0,0 +1,121 @@
+# Top Level CMake file for libcody.
+
+cmake_minimum_required(VERSION 3.4.3)
+
+if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+ message(STATUS "No build type selected, default to MinSizeRel")
+ set(CMAKE_BUILD_TYPE MinSizeRel)
+ set(LIBCODY_ENABLE_ASSERTIONS 1)
+endif()
+string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
+
+set(cmake_3_2_USES_TERMINAL USES_TERMINAL)
+
+if( CMAKE_SOURCE_DIR STREQUAL CMAKE_BINARY_DIR AND NOT MSVC_IDE)
+ message(FATAL_ERROR "In-source builds are not allowed. ")
+endif()
+
+# message(STATUS "SRC ${CMAKE_SOURCE_DIR} CSRC : ${CMAKE_CURRENT_SOURCE_DIR} ")
+
+# Add path for custom modules
+set(CMAKE_MODULE_PATH
+ ${CMAKE_MODULE_PATH}
+ "${CMAKE_CURRENT_SOURCE_DIR}/cmake"
+ "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules")
+
+# If we are building stand-alone, set up the names and versions.
+if(CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR
+ OR LIBCODY_STANDALONE_BUILD)
+
+ project(libcody CXX)
+
+ set(PACKAGE_NAME codylib)
+ set(LIBCODY_VERSION_MAJOR 0)
+ set(LIBCODY_VERSION_MINOR 0)
+ set(LIBCODY_VERSION_PATCH 1)
+ set(LIBCODY_VERSION_SUFFIX git)
+ set(LIBCODY_VERSION "${LIBCODY_VERSION_MAJOR}.${LIBCODY_VERSION_MINOR}.${LIBCODY_VERSION_PATCH}")
+ set(PACKAGE_VERSION "${LIBCODY_VERSION}-${LIBCODY_VERSION_SUFFIX}")
+ set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
+ set(PACKAGE_URL "https://github.com/urnathan/libcody")
+ set(PACKAGE_BUGREPORT "https://github.com/urnathan/libcody/issues")
+
+ set (GIT_REV "git" "-C" "${CMAKE_CURRENT_SOURCE_DIR}" "rev-parse" "--short=12" "HEAD")
+ execute_process(
+ COMMAND ${GIT_REV}
+ RESULT_VARIABLE HAD_ERROR
+ OUTPUT_VARIABLE CODY_REVISION
+ )
+ if (NOT HAD_ERROR)
+ string(REGEX REPLACE "\n$" "" CODY_REVISION "${CODY_REVISION}")
+ set (GIT_CHANGES "git" "-C" "${CMAKE_CURRENT_SOURCE_DIR}" "diff-index" "--quiet" "HEAD" "--")
+ execute_process(
+ COMMAND ${GIT_CHANGES}
+ RESULT_VARIABLE MOD_ERROR
+ OUTPUT_VARIABLE MOD_OUTPUT
+ )
+ if (MOD_ERROR)
+ set (CODY_REVISION "${CODY_REVISION}-modified")
+ endif ()
+ else()
+    set(CODY_REVISION "unknown")
+ endif ()
+ set(LIBCODY_STANDALONE YES)
+else()
+ set(LIBCODY_STANDALONE NO)
+endif()
+
+# We are using C++11
+set (CMAKE_CXX_STANDARD 11)
+
+message(STATUS "git revision ${CODY_REVISION} ")
+option(CODY_CHECKING "Enable checking" ON)
+# Address github issue #10
+option(CODY_WITHEXCEPTIONS "Enable exceptions" OFF)
+
+if (LIBCODY_STANDALONE)
+ include(CTest)
+endif()
+
+include(libcody-config-ix)
+
+add_definitions(
+ -DPACKAGE_URL="${PACKAGE_URL}"
+ -DBUGURL="${PACKAGE_BUGREPORT}"
+ -DSRCDIR="${CMAKE_CURRENT_SOURCE_DIR}"
+ -DPACKAGE_NAME="${PACKAGE_NAME}"
+ -DPACKAGE_STRING="${PACKAGE_STRING}"
+ -DPACKAGE_VERSION="${LIBCODY_VERSION}"
+ -DREVISION="${CODY_REVISION}"
+ )
+if (CODY_CHECKING)
+ add_definitions(-DNMS_CHECKING=1)
+else()
+ add_definitions(-DNMS_CHECKING=0)
+endif()
+
+set(LIBCODY_SOURCES
+ buffer.cc
+ client.cc
+ fatal.cc
+ netclient.cc
+ netserver.cc
+ resolver.cc
+ packet.cc
+ server.cc)
+
+if(LIBCODY_STANDALONE)
+ add_library(cody STATIC ${LIBCODY_SOURCES})
+else()
+ message(STATUS "Configured for in-tree build of libcody as LLVMcody")
+ add_llvm_component_library(LLVMcody ${LIBCODY_SOURCES})
+endif()
+
+if (LIBCODY_STANDALONE)
+
+ set_target_properties(cody PROPERTIES PUBLIC_HEADER "cody.hh")
+ install(TARGETS cody
+ LIBRARY DESTINATION lib
+ PUBLIC_HEADER DESTINATION include
+ )
+endif()
diff --git a/libcody/CODING.md b/libcody/CODING.md
new file mode 100644
index 0000000..1ff0a9d
--- /dev/null
+++ b/libcody/CODING.md
@@ -0,0 +1,115 @@
+# Coding standard
+
+I guess I should document this; it might not be obvious.
+
+libcody is implemented in C++11. Because it's used in compiler
+development, we can't use the latest and greatest.
+
+The formatting is close to GNU, but with a few differences.
+
+## Extensions to C++11
+
+It uses `__VA_OPT__` when available, falling back on GNU's variadic
+macro `, ##` extension. This is in the `Assert` macro, so one can have
+multi-argument template instantiations there. Not that libcody does
+that, but this is code I used elsewhere.
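+
+As a sketch of why that matters (the macro below is hypothetical, not
+libcody's actual `Assert`), the variadic tail is pasted back onto the
+condition, so a comma inside a template argument list does not break
+the macro:
+
+```c++
+#include <type_traits>
+#include <utility>
+
+// Hypothetical example macro; with __VA_OPT__ (or the `, ##` fallback)
+// any extra "arguments" are rejoined to the tested expression.
+#define Check(COND, ...) \
+  ((COND __VA_OPT__(, __VA_ARGS__)) ? (void)0 : __builtin_trap ())
+
+int main ()
+{
+  Check (std::is_same<int, int>::value);           // comma rejoined
+  Check (std::pair<int, long> (1, 2).first == 1);  // likewise
+}
+```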
+
+## GNU
+
+The underlying formatting is GNU style. Here are a few notes about
+things that commonly catch programmers unfamiliar with it:
+
+* Spaces between binary operators. Particularly in a function call,
+ between the name and the open paren:
+
+ ```c++
+ Fn (a + b, ary[4], *ptr);
+ ```
+
+ In general GNU style uses a lot more whitespace than Clang-style.
+ We're not trying to cram as much code as possible onto a page!
+
+* Scope braces are always on a line of their own, indented by 2
+ spaces, if they're a sub-statement of an `if`, `for` or whatever:
+
+ ```c++
+ if (bob)
+ {
+ Frob ();
+ Quux ();
+ }
+ ```
+
+ Conditions and loops containing a single statement should not use `{}`.
+ FWIW this was my personal indentation scheme, before I even met GNU code!
+
+* The same is true for a function definition body, except the
+ indentation is zero:
+
+ ```c++
+ int Foo ()
+ noexcept // indented
+ {
+ return 0;
+ }
+ ```
+
+* Initialization bracing is not like scope bracing. There tends to be
+ more flexibility.
+
+* Break lines at 80 chars; the break should be /before/ the operator, not after:
+
+ ```c++
+ a = (b
+ + c);
+ ptr
+ ->MemberFn (stuff);
+ Func
+ (arg);
+ ```
+
+ Thus you can tell what lines are continued from the previous by
+ looking at their start. Use parens to control indentation.
+
+ If you find yourself wanting to break a line at `.`, don't.
+ Refactor your code to avoid needing that.
+
+* Template instantiations and C++ casts should have no space before the `<`:
+
+ ```c++
+ std::vector<int> k;
+ static_cast<T> (arg); // space before the ( though
+ ```
+
+* Pointer and reference types need a space before the `*` or `&`, if
+ the preceding token is ascii text (a cpp-identifier):
+
+  ```c++
+ int *ptr;
+ int **ptr_ptr;
+ int *&pref = ptr;
+ ```
+
+  See below for a difference in qualifier placement.
+
+* Code should compile without warnings.
+
+## Not GNU
+
+### Names
+
+Unlike GNU code, variants of Camel Case are used. Use `PascalCase`
+for function, type and global variable names. Use `dromedaryCase` for
+member variables. Block-scope vars can be `dromedaryCase` or
+`snake_case`, your choice.
+
+### Type qualifiers
+
+Type qualifiers go after the thing they qualify. You have to do this
+for pointers anyway, and read them inside-out, because, C. Just being
+consistent:
+
+```c++
+int const foo = 5; // constant int
+int *const pfoo = nullptr; // constant pointer to int
+```
diff --git a/libcody/CONTRIB.md b/libcody/CONTRIB.md
new file mode 100644
index 0000000..234ed06
--- /dev/null
+++ b/libcody/CONTRIB.md
@@ -0,0 +1,10 @@
+# A probably incomplete list of contributors
+
+Thanks for the interest in this library!
+
+* Iain Sandoe
+ Darwin testing and porting
+
+* Martin Liska
+ Code cleanups
+
diff --git a/libcody/LICENSE b/libcody/LICENSE
new file mode 100644
index 0000000..261eeb9e
--- /dev/null
+++ b/libcody/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/libcody/LICENSE.gcc b/libcody/LICENSE.gcc
new file mode 100644
index 0000000..6a72a2b
--- /dev/null
+++ b/libcody/LICENSE.gcc
@@ -0,0 +1,29 @@
+This instance of Libcody is licensed under the GPLv3 as part of
+GCC. (See ../COPYING3.)
+
+The documentation of this file itself is separate from libcody.
+
+The GCC steering committee have declined to decide whether libcody may
+be included in GCC as an external dependency licensed under the Apache
+v2 license.
+
+In order to progress C++20 modules, this instance is relicensed as
+part of GCC under the GPLv3 and assigned to the FSF. This version is
+derived from libcody upstream b79dbea with contributions from jjravi
+removed. Specifically the bulk of the code is authored by Nathan
+Sidwell (me), and some portability issues fixed by Iain Sandoe. Iain
+has blessed this contribution in a private communication to me. A
+trivial install fix from Johel Peña is included here, along with typo
+fixes to README.md from Boris Kolpackov (I do not think either of
+those contains copyrightable work).
+
+To the extent that Apache V2 prevents it, you may not update to other
+upstream versions (past or future) of libcody, relicensing and/or
+reassigning copyright of that update, without reobtaining the authors'
+permission. You may of course update to other versions of libcody
+but keep the Apache license and libcody's copyright assignment. [This
+is merely pointing out the implications of the Apache V2 license and
+libcody's copyright, not adding additional requirements.]
+
+Any patches applied to this instance should be provided to upstream
+libcody, for consideration there.
diff --git a/libcody/Makefile.in b/libcody/Makefile.in
new file mode 100644
index 0000000..7b8a641
--- /dev/null
+++ b/libcody/Makefile.in
@@ -0,0 +1,187 @@
+# Nathan's generic Makefile -*- mode:Makefile -*-
+# Copyright (C) 2019-2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+ifeq (0,$(MAKELEVEL))
+ifneq (,@tools@)
+$(info Prepending @tools@/bin to PATH)
+PATH := @tools@/bin:$(PATH)
+export PATH
+endif
+ifeq (,$(SERIAL))
+# Figure out if we should set parallelism
+ifeq (,$(filter clean%,$(MAKECMDGOALS)))
+PARALLELISM := @NUM_CPUS@
+endif
+endif
+endif
+
+ifeq (00,$(MAKELEVEL)$(if $(PARALLELISM),0,1))
+# Although Make 4.3 documentation suggests I can set parallelism just
+# by appending to MAKEFLAGS, it doesn't seem to work. It's also not
+# possible to figure out the current Make invocation's parallelism,
+# the -j option doesn't appear in MAKEFLAGS and is magically inserted
+# when that is expanded in a rule. I can't figure how to get a rule
+# expansion into a variable to test. Fortunately, Make propagates an
+# incoming -j option rather than the one you attempted to append
+$(info Parallelizing $(PARALLELISM) ways)
+MAKEFLAGS += -j$(PARALLELISM)
+ifneq (,$(MAKECMDGOALS))
+$(MAKECMDGOALS): recurse
+endif
+recurse:
+ $(MAKE) -r$(MAKEFLAGS) $(MAKECMDGOALS)
+.PHONY: recurse
+else
+
+srcdir := @srcdir@
+prefix := @prefix@
+exec_prefix := @exec_prefix@
+bindir := @bindir@
+libdir := @libdir@
+includedir := @includedir@
+SUBDIRS := @SUBDIRS@
+# autoconf doesn't seem to like setting SHELL
+SHELL := $(shell which zsh 2>/dev/null >/dev/null && echo zsh \
+ || (echo "No ZSH, maybe flakey" >&2 && echo sh))
+
+# We have to place the -I paths last, so that building will see -I paths to us
+CXX := $(filter-out -I%,@CXX@)
+AR := @AR@
+INSTALL := $(srcdir)/build-aux/install-sh
+
+# C++ compiler options
+CXXFLAGS := @CXXFLAGS@
+CXXINC := $(filter -I%,@CXX@)
+CXXOPTS := $(CXXFLAGS)
+ifeq ($(notdir $(firstword $(CXX))),g++)
+# It's GCC, or pretending to be it -- so it better smell like it!
+# Code generation
+CXXOPTS += -fno-enforce-eh-specs
+CXXOPTS += -fno-stack-protector -fno-threadsafe-statics
+ifneq (@EXCEPTIONS@,yes)
+CXXOPTS += -fno-exceptions -fno-rtti
+endif
+ifeq ($(filter -fdebug-prefix-map=%,$(CXXOPTS)),)
+CXXOPTS += -fdebug-prefix-map=${srcdir}/=
+endif
+# Warning options
+CXXOPTS += -W -Wall -Woverloaded-virtual -Wshadow
+CXXOPTS += -Wno-invalid-offsetof -Wno-unused-variable
+CXXOPTS += -Wno-missing-field-initializers
+# Diagnostic options, look at controlling terminal so that piping
+# through more works
+MLEN := $(shell stty size </dev/tty 2>/dev/null | cut -d' ' -f2)
+ifneq (,$(MLEN))
+CXXOPTS += -fmessage-length=$(MLEN)
+endif
+CXXOPTS += -fdiagnostics-color=always -fno-diagnostics-show-option
+else
+ifeq ($(notdir $(firstword $(CXX))),clang++)
+CXXOPTS += -fno-stack-protector -fno-threadsafe-statics
+ifneq (@EXCEPTIONS@,yes)
+CXXOPTS += -fno-exceptions -fno-rtti
+endif
+# Warning options
+CXXOPTS += -W -Wall -Woverloaded-virtual -Wshadow
+CXXOPTS += -Wno-invalid-offsetof -Wno-unused-variable
+CXXOPTS += -Wno-missing-field-initializers
+else
+# Add different compiler's options here
+endif
+endif
+
+# Config
+CXXOPTS += $(filter-out -DHAVE_CONFIG_H,@DEFS@) -include config.h
+
+# Linker options
+LDFLAGS := -L. @LDFLAGS@
+LIBS := @LIBS@
+
+# Per-source & per-directory compile flags (warning: recursive)
+SRC_CXXFLAGS = $(CXXFLAGS$(patsubst $(srcdir)%,%,$1)) \
+ $(if $(filter-out $(srcdir)/,$1),\
+ $(call $0,$(dir $(patsubst %/,%,$1))))
+
+ifneq ($(MAINTAINER),)
+override MAINTAINER += $1
+endif
+ifeq (@MAINTAINER@,yes)
+MAINTAINER = $2
+else
+MAINTAINER = \# --enable-maintainer-mode to rebuild $1, or make MAINTAINER=touch
+endif
+
+vpath %.in $(srcdir)
+vpath %.cc $(srcdir)
+
+.SUFFIXES: .o .cc
+
+%.o: %.cc
+ @mkdir -p $(dir $@)
+ $(CXX) $(strip $(CXXOPTS) $(call SRC_CXXFLAGS,$<) $(CXXINC)) \
+ -MMD -MP -MF ${@:.o=.d} -c -o $@ $<
+
+all:: Makefile $(addprefix all.,$(SUBDIRS))
+
+check:: Makefile $(addprefix check.,$(SUBDIRS))
+
+clean:: Makefile $(addprefix clean.,$(SUBDIRS))
+
+revision.stamp: $(addprefix $(srcdir)/,. $(SUBDIRS))
+ @revision=$$(git -C $(srcdir) rev-parse HEAD 2>/dev/null) ;\
+ if test -n "$$revision" ;\
+ then revision=git-$$revision ;\
+ if git -C $(srcdir) status --porcelain 2>/dev/null | grep -vq '^ ' ;\
+ then revision+=M ;\
+ fi ;\
+ else revision=unknown ;\
+ fi ;\
+ echo $$revision > $@
+
+revision: revision.stamp
+ @cmp -s $< $@ || cp -f $< $@
+
+clean::
+ rm -f revision.stamp revision
+
+distclean:: clean
+ rm -f config.log config.status
+ rm -rf $(SUBDIRS)
+
+$(srcdir)/configure: $(srcdir)/configure.ac \
+ $(patsubst %,$(srcdir)/%/config.m4,. $(SUBDIRS))
+ $(call MAINTAINER,$@,cd $(@D) && autoconf -W all,error)
+
+$(srcdir)/config.h.in: $(srcdir)/configure.ac \
+ $(patsubst %,$(srcdir)/%/config.m4,. $(SUBDIRS))
+ $(call MAINTAINER,$@,cd $(@D) && autoheader -f -W all,error)
+
+config.h: config.status config.h.in
+ ./$< --header=$@
+ touch $@
+
+ifeq ($(filter %clean,$(MAKECMDGOALS)),)
+@CONFIG_FILES@: %: config.status %.in
+ ./$< --file=$@
+ touch $@
+endif
+
+config.status: $(srcdir)/configure $(srcdir)/config.h.in
+ if test -x $@; then ./$@ -recheck; else $< @configure_args@; fi
+
+distclean:: clean
+ rm -f config.h
+
+maintainer-clean:: distclean
+ rm -f $(srcdir)/config.h.in
+
+clean::
+ rm -f $(shell find $(srcdir) -name '*~')
+
+.PHONY: all check clean distclean maintainer-clean
+
+-include $(addsuffix /Makesub,. $(SUBDIRS))
+-include $(addsuffix /tests/Makesub,. $(SUBDIRS))
+
+endif
diff --git a/libcody/Makesub.in b/libcody/Makesub.in
new file mode 100644
index 0000000..0bfe6f6
--- /dev/null
+++ b/libcody/Makesub.in
@@ -0,0 +1,48 @@
+# CODYlib -*- mode:Makefile -*-
+# Copyright (C) 2019-2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+DOXYGEN := @DOXYGEN@
+CXXFLAGS/ := -I$(srcdir)
+LIBCODY.O := buffer.o client.o fatal.o netclient.o netserver.o \
+ resolver.o packet.o server.o
+
+all:: .gdbinit
+
+.gdbinit: gdbinit
+ rm -f $@ ; ln -s $< $@
+
+clean::
+ rm -f gdbinit .gdbinit
+
+all:: libcody.a
+
+libcody.a: $(LIBCODY.O)
+ $(AR) -cr $@ $^
+
+clean::
+ rm -f $(LIBCODY.O) $(LIBCODY.O:.o=.d)
+ rm -f libcody.a
+
+CXXFLAGS/fatal.cc = -DREVISION='"$(shell cat revision)"' -DSRCDIR='"$(srcdir)"'
+
+fatal.o: Makefile revision
+
+doc:: dox.cfg
+ifneq ($(DOXYGEN),:)
+ cd $(<D); $(DOXYGEN) $(<F) >&dox.log
+else
+ @echo "doxygen not present, documentation not built"
+endif
+
+clean::
+ rm -rf dox dox.log
+
+install::
+ $(INSTALL) -d $(libdir) $(includedir)
+ $(INSTALL) libcody.a $(libdir)
+ $(INSTALL) $(srcdir)/cody.hh $(includedir)
+
+ifeq ($(filter clean%,$(MAKECMDGOALS)),)
+-include $(LIBCODY.O:.o=.d)
+endif
diff --git a/libcody/README.md b/libcody/README.md
new file mode 100644
index 0000000..a95085e
--- /dev/null
+++ b/libcody/README.md
@@ -0,0 +1,497 @@
+# libCODY: COmpiler DYnamism<sup><a href="#1">1</a></sup>
+
+Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+
+libCODY is an implementation of a communication protocol between
+compilers and build systems.
+
+**WARNING:** This is preliminary software.
+
+In addition to supporting C++ modules, this may also support LTO
+requirements and could also deal with generated #include files
+and feed the compiler with prepruned include paths and whatnot. (The
+system calls involved in include searches can be quite expensive on
+some build infrastructures.)
+
+* Client and Server objects
+* Direct connection for in-process use
+* Testing with Joust (that means nothing to you, doesn't it!)
+
+
+## Problem Being Solved
+
+The origin is in C++20 modules:
+```
+import foo;
+```
+
+At that import, the compiler needs<sup><a href="#2">2</a></sup> to
+load up the compiled serialization of module `foo`. Where is that
+file? Does it even exist? Unless the build system already knows the
+dependency graph, this might be a completely unknown module. Now, the
+build system knows how to build things, but it might not have complete
+information about the dependencies. The ultimate source of
+dependencies is the source code being compiled, and specifying the
+same thing in multiple places is a recipe for build skew.
+
+Hence, a protocol by which a compiler can query a build system. This
+was originally described in <a
+href="https://wg21.link/p1184r1">p1184r1:A Module Mapper</a>. Along
+with a proof-of-concept hack in GNUmake, described in <a
+href="https://wg21.link/p1602">p1602:Make Me A Module</a>. The current
+implementation has evolved and an update to p1184 will be forthcoming.
+
+## Packet Encoding
+
+The protocol is turn-based. The compiler sends a block of one or more
+requests to the builder, then waits for a block of responses to all of
+those requests. If the builder needs to compile something to satisfy
+a request, there may be some time before the response. A builder may
+service multiple compilers concurrently, each as a separate connection.
+
+When multiple requests are in a block, the responses are also in a
+block, and in corresponding order. The responses must not be
+commenced eagerly -- they must wait until the incoming block has ended
+(as mentioned above, it is turn-based). To do otherwise risks
+deadlock, as there is no requirement for a sending end of the
+communication to listen for incoming responses (or new requests) until
+it has completed sending its current block.
+
+Every request has a response.
+
+Requests and responses are user-readable text. It is not intended as
+a transmission medium to send large binary objects (such as compiled
+modules). It is presumed the builder and the compiler share a file
+system, for that kind of thing.<sup><a href="#3">3</a></sup>
+
+Message characters are encoded in UTF8.
+
+Messages are a sequence of octets ending with a NEWLINE (0xa). The lines
+consist of a sequence of words, separated by WHITESPACE (0x20 or 0x9).
+Words themselves do not contain WHITESPACE. Lines consisting solely
+of WHITESPACE (or empty) are ignored.
+
+To encode a block of multiple messages, non-final messages end with a
+single word of SEMICOLON (0x3b), immediately before the NEWLINE. Thus
+a serial connection can determine whether a block is complete without
+decoding the messages.
+
+Words containing characters in the set [-+_/%.A-Za-z0-9] need not be
+quoted. Words containing characters outside that set should be
+quoted. A zero-length word may be achieved with `''`.
+
+Quoted words begin and end with APOSTROPHE (0x27). Within the quoted
+word, BACKSLASH (0x5c) is used as an escape mechanism, with the
+following meanings:
+
+* \\n - NEWLINE (0xa)
+* \\t - TAB (0x9)
+* \\' - APOSTROPHE (')
+* \\\\ - BACKSLASH (\\)
+
+Characters in the range [0x00,0x20) and 0x7f are escaped as `\`
+followed by one or two lowercase hex characters. Octets in the range
+[0x80,0xff) are UTF8 encodings of unicode characters outside the
+traditional ASCII set and are passed as such.
+
+Decoding should be more relaxed. Unquoted words containing characters
+in the range [0x20,0xff] other than BACKSLASH or APOSTROPHE should be
+accepted. In a quoted sequence, `\` followed by one or two lower case
+hex characters decode to that octet. Further, words can be
+constructed from a mixture of abutted quoted and unquoted sequences.
+For instance `FOO' 'bar` would decode to the word `FOO bar`.
+
+Notice that the block continuation marker of `;` is not a valid
+encoding of the word `;`, which would be `';'`.
+
+It is recommended that words are separated by single SPACE characters.
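+
+For illustration (the request names here are invented, not part of the
+protocol), a block of two messages, the first containing a word with a
+SPACE and a word with an escaped NEWLINE, could be encoded as:
+
+```
+FROB 'a word' 'two\nlines' ;
+FROB plain/word.cc
+```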
+
+## Messages
+
+The message descriptions use `$metavariable` examples.
+
+The request messages are specific to a particular action. The response
+messages are more generic, describing their value types, but not their
+meaning. Message consumers need to know the request to decode them.
+Notice that the `Packet::GetRequest()` method records in response packets
+what the request being responded to was. Do not confuse this with the
+`Packet::GetCode ()` method.
+
+### Responses
+
+The simplest response is a single:
+
+`OK`
+
+This indicates the request was successful.
+
+
+An error response is:
+
+`ERROR $message`
+
+The message is a human-readable string. It indicates failure of the request.
+
+Pathnames are encoded with:
+
+`PATHNAME $pathname`
+
+Boolean responses use:
+
+`BOOL` (`TRUE`|`FALSE`)
+
+### Handshake Request
+
+The first message is a handshake:
+
+`HELLO $version $compiler $ident`
+
+The `$version` is a numeric value, currently `1`. `$compiler` identifies
+the compiler &mdash; builders may need to keep compiled modules from
+different compilers separate. `$ident` is an identifier the builder
+might use to identify the compilation it is communicating with.
+
+Responses are:
+
+`HELLO $version $builder [$flags]`
+
+A successful handshake. The communication is now connected and other
+messages may be exchanged. An ERROR response indicates an unsuccessful
+handshake. The communication remains unconnected.
+
+There is nothing restricting a handshake to its own message block. Of
+course, if the handshake fails, subsequent non-handshake messages in
+the block will fail (producing error responses).
+
+The `$flags` word, if present, allows a server to control what requests
+might be given. See below.
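+
+As an illustration (identifiers invented for the example), the compiler
+sends the first line below and the builder answers with the second:
+
+```
+HELLO 1 GCC some-ident
+HELLO 1 BUILDER
+```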
+
+### C++ Module Requests
+
+A set of requests are specific to C++ modules:
+
+#### Flags
+
+Several requests and one response have an optional `$flags` word.
+These are the `Cody::Flags` value pertaining to that request. If
+omitted the value 0 is implied. The following flags are available:
+
+* `0`, `None`: No flags.
+
+* `1<<0`, `NameOnly`: The request is for the name only, and not the
+ CMI contents.
+
+The `NameOnly` flag may be provided in a handshake response, and
+indicates that the server is interested in requests only for their
+implied dependency information. It may be provided on a request to
+indicate that only the CMI name is required, not its contents (for
+instance, when preprocessing). Note that a compiler may still make
+`NameOnly` requests even if the server did not ask for such.
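+
+For example (module name invented), a request carrying the `NameOnly`
+flag simply appends the flag value as a final word:
+
+```
+MODULE-IMPORT foo 1
+```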
+
+#### Repository
+
+All relative CMI file names are relative to a repository. (There are
+usually no absolute CMI files). The repository may be determined
+with:
+
+`MODULE-REPO`
+
+A PATHNAME response is expected. The `$pathname` may be an empty
+word, which is equivalent to `.`. When the response is a relative
+pathname, it must be relative to the client's current working
+directory (the client might be a process on a different host to the
+server). You may set the repository to `/`, if you wish to use paths
+relative to the root directory.
+
+#### Exporting
+
+A compilation of a module interface, partition or header unit can
+inform the builder with:
+
+`MODULE-EXPORT $module [$flags]`
+
+This will result in a PATHNAME response naming the Compiled Module
+Interface pathname to write.
+
+The `MODULE-EXPORT` request does not indicate the module has been
+successfully compiled. At most one `MODULE-EXPORT` is to be made, and
+as the connection is for a single compilation, the builder may infer
+dependency relationships between the module being generated and import
+requests made.
+
+Named module names and header unit names are distinguished by making
+the latter unambiguously look like file names. Firstly, they must be
+fully resolved according to the compiler's usual include path. If
+that results in an absolute file name (beginning with `/`, or
+certain other OS-specific sequences), all is well. Otherwise a
+relative file name must be prefixed by `./` to be distinguished from a
+similarly named named module. This prefixing must occur, even if the
+header-unit's name contains characters that cannot appear in a named
+module's name.
+
+It is expected that absolute header-unit names convert to relative CMI
+names, to keep all CMIs within the CMI repository. This means that
+steps must be taken to distinguish the CMIs for `/here` from `./here`,
+and this can be achieved by replacing the leading `./` directory with
+`,/`, which is visually similar but does not have the self-reference
+semantics of dot. Likewise, header-unit names containing `..`
+directories, can be remapped to `,,`. (When symlinks are involved
+`bob/dob/..` might not be `bob`, of course.) C++ header-unit
+semantics are such that there is no need to resolve multiple ways of
+spelling a particular header-unit to a unique CMI file.
+
+Successful compilation of an interface is indicated with a subsequent:
+
+`MODULE-COMPILED $module [$flags]`
+
+request. This indicates the CMI file has been written to disk, so
+that any other compilations waiting on it may proceed. Depending on
+compiler implementation, the CMI may be written before the compilation
+completes. A single OK response is expected.
+
+Compilation failure can be inferred by lack of a `MODULE-COMPILED`
+request. It is presumed the builder can determine this, as it is also
+responsible for launching and reaping the compiler invocations
+themselves.
+
+#### Importing
+
+Importation, including that of header-units, uses:
+
+`MODULE-IMPORT $module [$flags]`
+
+A PATHNAME response names the CMI file to be read. Should the builder
+have to invoke a compilation to produce the CMI, the response should
+be delayed until that occurs. If such a compilation fails, an error
+response should be provided to the requestor &mdash; which will then
+presumably fail in some manner.
+
+#### Include Translation
+
+Include translation can be determined with:
+
+`INCLUDE-TRANSLATE $header [$flags]`
+
+The header name, `$header`, is the fully resolved header name, in the
+above-mentioned unambiguous filename form. The response will either
+be a BOOL response indicating textual inclusion, or a PATHNAME
+response naming the CMI for such translation. The BOOL value is TRUE,
+if the header is known to be a textual header, and FALSE if nothing is
+known about it -- the latter might cause diagnostics about incomplete
+knowledge.
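+
+Putting these requests together (the file names below are purely
+illustrative), a compilation of module `foo` that imports `bar` and
+includes a header might send the request block:
+
+```
+MODULE-EXPORT foo ;
+MODULE-IMPORT bar ;
+INCLUDE-TRANSLATE ./config.h
+```
+
+and receive the corresponding response block:
+
+```
+PATHNAME foo.cmi ;
+PATHNAME bar.cmi ;
+BOOL TRUE
+```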
+
+### GCC LTO Messages
+
+This set of requests is used for GCC LTO jobserver integration with GNU Make.
+
+## Building libCody
+
+Libcody is written in C++11. (It's intended for compilers, so
+there'd be a bootstrapping problem if it used the latest and greatest.)
+
+### Using configure and make
+
+It supports the usual `configure`, `make`, `make check` & `make install`
+sequence. It does not support building in the source directory --
+that just didn't drop out, and it's not how I build things (because,
+again, for compilers). Excitingly it uses my own `joust` test
+harness, so you'll need to build and install that somewhere, if you
+want the comfort of testing.
+
+The following configure options are available, in addition to the usual set:
+
+* `--enable-checking` Compile with assert-like checking. Defaults to on.
+
+* `--with-tooldir=DIR` Prepend `DIR` to `PATH` when building (`DIR`
+ need not already include the trailing `/bin`, and the right things
+ happen). Use this if you need to point to non-standard tools that
+ you usually don't have in your path. This path is also used when
+ the configure script searches for programs.
+
+* `--with-toolinc=DIR`, `--with-toollib=DIR`, include path and library
+ path variants of `--with-tooldir`. If these are siblings of the
+ tool bin directory, they'll be found automatically.
+
+* `--with-compiler=NAME` Specify a particular compiler to use.
+ Usually what configure finds is sufficiently usable.
+
+* `--with-bugurl=URL` Override the bugreporting URL. Do this if
+ you're providing libcody as part of a package that /you/ are
+ supporting.
+
+* `--enable-maintainer-mode` Specify that rules to rebuild things like
+ `configure` (with `autoconf`) should be enabled. When not enabled,
+ you'll get a message if these appear out of date, but that can
+ happen naturally after an update or clone as `git`, in common with
+ other VCs, doesn't preserve the relative ordering of file
+ modifications. You can use `make MAINTAINER=touch` to shut make up,
+ if this occurs (or manually execute the `autoconf` and related
+ commands).
+
+When building, you can override the default optimization flags with
+`CXXFLAGS=$flags`. I often build a debuggable library with `make
+CXXFLAGS=-g3`.
+
+The `Makefile` will also parallelize according to the number of CPUs,
+unless you specify explicitly with a `-j` option. This is a little
+clunky, as it's not possible to figure out inside the makefile whether
+the user provided `-j`. (Or at least I've not figured out how.)
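+
+An example out-of-source build (the paths are placeholders):
+
+```
+mkdir obj && cd obj
+/path/to/libcody/source/configure --prefix=/path/to/installation --enable-checking
+make
+make check   # needs joust installed
+make install
+```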
+
+### Using cmake and make
+
+#### In the clang/LLVM project
+
+The primary motivation for a cmake implementation is to allow building
+libcody "in tree" in clang/LLVM. In that case, a checkout of libcody
+can be placed (or symbolically linked) into clang/tools. This will
+configure and build the library along with other LLVM dependencies.
+
+*NOTE* This is not treated as an installable entity (it is present only
+for use by the project).
+
+*NOTE* The testing targets would not be appropriate in this configuration;
+it is expected that lit-based testing of the required functionality will be
+done by the code using the library.
+
+#### Stand-alone
+
+For use on platforms that don't support configure & make effectively, it
+is possible to use the cmake & make process in stand-alone mode (similar
+to the configure & make process above).
+
+An example use.
+```
+cmake -DCMAKE_INSTALL_PREFIX=/path/to/installation -DCMAKE_CXX_COMPILER=clang++ /path/to/libcody/source
+make
+make install
+```
+Supported flags (additions to the usual cmake ones).
+
+* `-DCODY_CHECKING=ON,OFF`: Compile with assert-like checking. (defaults ON)
+
+* `-DCODY_WITHEXCEPTIONS=ON,OFF`: Compile with C++ exceptions and RTTI enabled.
+(defaults OFF, to be compatible with GCC and LLVM).
+
+*TODO*: At present there is no support for `ctest` integration (this should be
+feasible, provided that `joust` is installed and can be discovered by `cmake`).
+
+## API
+
+The library defines entities in the `::Cody` namespace.
+
+There are 4 user-visible classes:
+
+* `Packet`: Responses to requests are `Packets`. These have a code,
+ indicating the response kind, and a payload.
+
+* `Client`: The compiler-end of a connection. Requests may be made
+ and responses are returned.
+
+* `Server`: The builder-end of a connection. Requests may be waited
+ for, and responses made. Builders that serve multiple concurrent
+ connections and spawn compilations to resolve dependencies may need
+ to derive from this class to provide response queuing.
+
+* `Resolver`: The processing engine of the builder side. User code is
+ expected to derive from this class and provide virtual function
+ overriders to affect the semantics of the resolver.
+
+In addition there are a number of helpers to setup connections.
+
+Logically the Client and the Server communicate via a sequential
+channel. The channel may be provided by:
+
+* two pipes, with different file descriptors for reading and writing
+ at each end.
+
+* a socket, which will use the same file descriptor for reading and
+  writing. The socket can be created in a number of ways, including
+ Unix domain and IPv6 TCP, for which helpers are provided.
+
+* a direct, in-process, connection, using buffer swapping.
+
+The communication channel is presumed reliable.
+
+Refer to the (currently very sparse) doxygen-generated documentation
+for details of the API.
+
+## Examples
+
+To create an in-process resolver, use the following boilerplate:
+
+```
+class MyResolver : public Cody::Resolver { ... stuff here ... };
+
+Cody::Client *MakeClient (char const *maybe_ident)
+{
+ auto *r = new MyResolver (...);
+ auto *s = new Cody::Server (r);
+ auto *c = new Cody::Client (s);
+
+ auto t = c->ConnectRequest ("ME", maybe_ident);
+ if (t.GetCode () == Cody::Client::TC_CONNECT)
+ ;// Yay!
+ else if (t.GetCode () == Cody::Client::TC_ERROR)
+ report_error (t.GetString ());
+
+ return c;
+}
+
+```
+
+For a remotely connecting client:
+```
+Cody::Client *MakeClient (char const *name, int port, char const *maybe_ident)
+{
+  char const *err = nullptr;
+  int fd = Cody::OpenInet6 (&err, name, port);
+ if (fd < 0)
+ { ... error... return nullptr;}
+
+ auto *c = new Cody::Client (fd);
+
+ auto t = c->ConnectRequest ("ME", maybe_ident);
+ if (t.GetCode () == Cody::Client::TC_CONNECT)
+ ;// Yay!
+ else if (t.GetCode () == Cody::Client::TC_ERROR)
+ report_error (t.GetString ());
+
+ return c;
+}
+```
+
+## Future Directions
+
+* Current Directory. There is no mechanism to check the builder and
+ the compiler have the same working directory. Perhaps that should
+ be addressed.
+
+* Include path canonicalization and/or header file lookup. This can be
+ expensive, particularly with many `-I` options, due to the system
+ calls. Perhaps using a common resource would be cheaper?
+
+* Generated header file lookup/construction. This is essentially the
+ same problem as importing a module, and build systems are crap at
+ dealing with this.
+
+* Link-time compilations. Another place the compiler would like to
+ ask the build system to do things.
+
+* C++20 API entrypoints &mdash; std::string_view would be nice
+
+* Exception-safety audit. Exceptions are not used, but memory
+  exhaustion could happen. And perhaps a user's resolver code employs
+ exceptions?
+
+<a name="1">1</a>: Or a small town in Wyoming
+
+<a name="2">2</a>: This describes one common implementation technique.
+The std itself doesn't require such serializations, but the ability to
+create them is kind of the point. Also, 'compiler' is used where we
+mean any consumer of a module, and 'build system' where we mean any
+producer of a module.
+
+<a name="3">3</a>: Even when the builder is managing a distributed set
+of compilations, the builder must have a mechanism to get source files
+to, and object files from, the compilations. That scheme can also
+transfer the CMI files.
diff --git a/libcody/buffer.cc b/libcody/buffer.cc
new file mode 100644
index 0000000..52df317
--- /dev/null
+++ b/libcody/buffer.cc
@@ -0,0 +1,387 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+// C++
+#include <algorithm>
+// C
+#include <cstring>
+// OS
+#include <unistd.h>
+#include <cerrno>
+
+// MessageBuffer code
+
+// Lines consist of words and end with a NEWLINE (0xa) char
+// Whitespace characters are TAB (0x9) and SPACE (0x20)
+// Words consist of non-whitespace chars separated by whitespace.
+// Multiple lines in one transaction are indicated by ending non-final
+// lines with a SEMICOLON (0x3b) word, immediately before the NEWLINE
+// Continuations with ; preceding it
+// Words matching regexp [-+_/%.a-zA-Z0-9]+ need no quoting.
+// Quoting with '...'
+// Anything outside of [-+_/%.a-zA-Z0-9] needs quoting
+// Anything outside of <= <space> or DEL or \' or \\ needs escaping.
+// Escapes are \\, \', \n, \t, \_, everything else as \<hex><hex>?
+// Spaces separate words, UTF8 encoding for non-ascii chars
+
+namespace Cody {
+namespace Detail {
+
+static const char CONTINUE = S2C(u8";");
+
+void MessageBuffer::BeginLine ()
+{
+ if (!buffer.empty ())
+ {
+ // Terminate the previous line with a continuation
+ buffer.reserve (buffer.size () + 3);
+ buffer.push_back (S2C(u8" "));
+ buffer.push_back (CONTINUE);
+ buffer.push_back (S2C(u8"\n"));
+ }
+ lastBol = buffer.size ();
+}
+
+// QUOTE means 'maybe quote', we search it for quote-needing chars
+
+void MessageBuffer::Append (char const *str, bool quote, size_t len)
+{
+ if (len == ~size_t (0))
+ len = strlen (str);
+
+ if (!len && !quote)
+ return;
+
+ // We want to quote characters outside of [-+_A-Za-z0-9/%.], anything
+ // that could remotely be shell-active. UTF8 encoding for non-ascii.
+ if (quote && len)
+ {
+ quote = false;
+ // Scan looking for quote-needing characters. We could just
+ // append until we find one, but that's probably confusing
+ for (size_t ix = len; ix--;)
+ {
+ unsigned char c = (unsigned char)str[ix];
+ if (!((c >= S2C(u8"a") && c <= S2C(u8"z"))
+ || (c >= S2C(u8"A") && c <= S2C(u8"Z"))
+ || (c >= S2C(u8"0") && c <= S2C(u8"9"))
+ || c == S2C(u8"-") || c == S2C(u8"+") || c == S2C(u8"_")
+ || c == S2C(u8"/") || c == S2C(u8"%") || c == S2C(u8".")))
+ {
+ quote = true;
+ break;
+ }
+ }
+ }
+
+ // Maximal length of appended string
+ buffer.reserve (buffer.size () + len * (quote ? 3 : 1) + 2);
+
+ if (quote)
+ buffer.push_back (S2C(u8"'"));
+
+ for (auto *end = str + len; str != end;)
+ {
+ auto *e = end;
+
+ if (quote)
+ // Look for next escape-needing char. More relaxed than
+ // the earlier needs-quoting check.
+ for (e = str; e != end; ++e)
+ {
+ unsigned char c = (unsigned char)*e;
+ if (c < S2C(u8" ") || c == 0x7f
+ || c == S2C(u8"\\") || c == S2C(u8"'"))
+ break;
+ }
+ buffer.insert (buffer.end (), str, e);
+ str = e;
+
+ if (str == end)
+ break;
+
+ buffer.push_back (S2C(u8"\\"));
+ switch (unsigned char c = (unsigned char)*str++)
+ {
+ case S2C(u8"\t"):
+ c = S2C(u8"t");
+ goto append;
+
+ case S2C(u8"\n"):
+ c = S2C(u8"n");
+ goto append;
+
+ case S2C(u8"'"):
+ case S2C(u8"\\"):
+ append:
+ buffer.push_back (c);
+ break;
+
+ default:
+ // Full-on escape. Use 2 lower-case hex chars
+ for (unsigned shift = 8; shift;)
+ {
+ shift -= 4;
+
+ char nibble = (c >> shift) & 0xf;
+ nibble += S2C(u8"0");
+ if (nibble > S2C(u8"9"))
+ nibble += S2C(u8"a") - (S2C(u8"9") + 1);
+ buffer.push_back (nibble);
+ }
+ }
+ }
+
+ if (quote)
+ buffer.push_back (S2C(u8"'"));
+}
+
+void MessageBuffer::Append (char c)
+{
+ buffer.push_back (c);
+}
+
+void MessageBuffer::AppendInteger (unsigned u)
+{
+ std::string v (std::to_string (u));
+ AppendWord (v);
+}
+
+int MessageBuffer::Write (int fd) noexcept
+{
+ size_t limit = buffer.size () - lastBol;
+ ssize_t count = write (fd, &buffer.data ()[lastBol], limit);
+
+ int err = 0;
+ if (count < 0)
+ err = errno;
+ else
+ {
+ lastBol += count;
+ if (size_t (count) != limit)
+ err = EAGAIN;
+ }
+
+ if (err != EAGAIN && err != EINTR)
+ {
+ // Reset for next message
+ buffer.clear ();
+ lastBol = 0;
+ }
+
+ return err;
+}
+
+int MessageBuffer::Read (int fd) noexcept
+{
+ constexpr size_t blockSize = 200;
+
+ size_t lwm = buffer.size ();
+ size_t hwm = buffer.capacity ();
+ if (hwm - lwm < blockSize / 2)
+ hwm += blockSize;
+ buffer.resize (hwm);
+
+ auto iter = buffer.begin () + lwm;
+ ssize_t count = read (fd, &*iter, hwm - lwm);
+ buffer.resize (lwm + (count >= 0 ? count : 0));
+
+ if (count < 0)
+ return errno;
+
+ if (!count)
+ // End of file
+ return -1;
+
+ bool more = true;
+ for (;;)
+ {
+ auto newline = std::find (iter, buffer.end (), S2C(u8"\n"));
+ if (newline == buffer.end ())
+ break;
+ more = newline != buffer.begin () && newline[-1] == CONTINUE;
+ iter = newline + 1;
+
+ if (iter == buffer.end ())
+ break;
+
+ if (!more)
+ {
+ // There is no continuation, but there are chars after the
+ // newline. Truncate the buffer and return an error
+ buffer.resize (iter - buffer.begin ());
+ return EINVAL;
+ }
+ }
+
+ return more ? EAGAIN : 0;
+}
+
+int MessageBuffer::Lex (std::vector<std::string> &result)
+{
+ result.clear ();
+
+ int err = ENOENT;
+ if (IsAtEnd ())
+ return ENOENT;
+
+ Assert (buffer.back () == S2C(u8"\n"));
+
+ auto iter = buffer.begin () + lastBol;
+
+ for (std::string *word = nullptr;;)
+ {
+ char c = *iter;
+
+ ++iter;
+ if (c == S2C(u8" ") || c == S2C(u8"\t"))
+ {
+ word = nullptr;
+ continue;
+ }
+
+ if (c == S2C(u8"\n"))
+ break;
+
+ if (c == CONTINUE)
+ {
+ // Line continuation
+ if (word || *iter != S2C(u8"\n"))
+ goto malformed;
+ ++iter;
+ break;
+ }
+
+ if (c <= S2C(u8" ") || c >= 0x7f)
+ goto malformed;
+
+ if (!word)
+ {
+ result.emplace_back ();
+ word = &result.back ();
+ }
+
+ if (c == S2C(u8"'"))
+ {
+ // Quoted word
+ for (;;)
+ {
+ c = *iter;
+
+ if (c == S2C(u8"\n"))
+ {
+ malformed:;
+ result.clear ();
+ iter = std::find (iter, buffer.end (), S2C(u8"\n"));
+ auto back = iter;
+ if (back[-1] == CONTINUE && back[-2] == S2C(u8" "))
+ // Smells like a line continuation
+ back -= 2;
+ result.emplace_back (&buffer[lastBol],
+ back - buffer.begin () - lastBol);
+ ++iter;
+ lastBol = iter - buffer.begin ();
+ return EINVAL;
+ }
+
+ if (c < S2C(u8" ") || c >= 0x7f)
+ goto malformed;
+
+ ++iter;
+ if (c == S2C(u8"'"))
+ break;
+
+ if (c == S2C(u8"\\"))
+ // escape
+ switch (c = *iter)
+ {
+ case S2C(u8"\\"):
+ case S2C(u8"'"):
+ ++iter;
+ break;
+
+ case S2C(u8"n"):
+ c = S2C(u8"\n");
+ ++iter;
+ break;
+
+ case S2C(u8"_"):
+ // We used to escape SPACE as \_, so accept that
+ c = S2C(u8" ");
+ ++iter;
+ break;
+
+ case S2C(u8"t"):
+ c = S2C(u8"\t");
+ ++iter;
+ break;
+
+ default:
+ {
+ unsigned v = 0;
+ for (unsigned nibble = 0; nibble != 2; nibble++)
+ {
+ c = *iter;
+ if (c < S2C(u8"0"))
+ {
+ if (!nibble)
+ goto malformed;
+ break;
+ }
+ else if (c <= S2C(u8"9"))
+ c -= S2C(u8"0");
+ else if (c < S2C(u8"a"))
+ {
+ if (!nibble)
+ goto malformed;
+ break;
+ }
+ else if (c <= S2C(u8"f"))
+ c -= S2C(u8"a") - 10;
+ else
+ {
+ if (!nibble)
+ goto malformed;
+ break;
+ }
+ ++iter;
+ v = (v << 4) | c;
+ }
+ c = v;
+ }
+ }
+ word->push_back (c);
+ }
+ }
+ else
+ // Unquoted character
+ word->push_back (c);
+ }
+ lastBol = iter - buffer.begin ();
+ if (result.empty ())
+ return ENOENT;
+
+ return 0;
+}
+
+void MessageBuffer::LexedLine (std::string &str)
+{
+ if (lastBol)
+ {
+ size_t pos = lastBol - 1;
+ for (; pos; pos--)
+ if (buffer[pos-1] == S2C(u8"\n"))
+ break;
+
+ size_t end = lastBol - 1;
+ if (buffer[end-1] == CONTINUE && buffer[end-2] == S2C(u8" "))
+ // Strip line continuation
+ end -= 2;
+ str.append (&buffer[pos], end - pos);
+ }
+}
+} // Detail
+} // Cody
diff --git a/libcody/build-aux/config.guess b/libcody/build-aux/config.guess
new file mode 100755
index 0000000..256083a
--- /dev/null
+++ b/libcody/build-aux/config.guess
@@ -0,0 +1,1476 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+# Copyright 1992-2018 Free Software Foundation, Inc.
+
+timestamp='2018-03-08'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program. This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+#
+# Originally written by Per Bothner; maintained since 2000 by Ben Elliston.
+#
+# You can get the latest version of this script from:
+# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess
+#
+# Please send patches to <config-patches@gnu.org>.
+
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Options:
+ -h, --help print this help, then exit
+ -t, --time-stamp print date of last modification, then exit
+ -v, --version print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright 1992-2018 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case $1 in
+ --time-stamp | --time* | -t )
+ echo "$timestamp" ; exit ;;
+ --version | -v )
+ echo "$version" ; exit ;;
+ --help | --h* | -h )
+ echo "$usage"; exit ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ echo "$me: invalid option $1$help" >&2
+ exit 1 ;;
+ * )
+ break ;;
+ esac
+done
+
+if test $# != 0; then
+ echo "$me: too many arguments$help" >&2
+ exit 1
+fi
+
+trap 'exit 1' 1 2 15
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,) echo "int x;" > "$dummy.c" ;
+ for c in cc gcc c89 c99 ; do
+ if ($c -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then
+ CC_FOR_BUILD="$c"; break ;
+ fi ;
+ done ;
+ if test x"$CC_FOR_BUILD" = x ; then
+ CC_FOR_BUILD=no_compiler_found ;
+ fi
+ ;;
+ ,,*) CC_FOR_BUILD=$CC ;;
+ ,*,*) CC_FOR_BUILD=$HOST_CC ;;
+esac ; set_cc_for_build= ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+ PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+case "$UNAME_SYSTEM" in
+Linux|GNU|GNU/*)
+ # If the system lacks a compiler, then just pick glibc.
+ # We could probably try harder.
+ LIBC=gnu
+
+ eval "$set_cc_for_build"
+ cat <<-EOF > "$dummy.c"
+ #include <features.h>
+ #if defined(__UCLIBC__)
+ LIBC=uclibc
+ #elif defined(__dietlibc__)
+ LIBC=dietlibc
+ #else
+ LIBC=gnu
+ #endif
+ EOF
+ eval "`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g'`"
+
+ # If ldd exists, use it to detect musl libc.
+ if command -v ldd >/dev/null && \
+ ldd --version 2>&1 | grep -q ^musl
+ then
+ LIBC=musl
+ fi
+ ;;
+esac
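+# For example (illustrative): on a uClibc system the preprocessed dummy.c
+# still contains the line "LIBC=uclibc", which the eval above picks up;
+# musl is detected separately from the banner printed by `ldd --version`.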
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "$UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION" in
+ *:NetBSD:*:*)
+ # NetBSD (nbsd) targets should (where applicable) match one or
+ # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+ # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
+ # switched to ELF, *-*-netbsd* would select the old
+ # object file format. This provides both forward
+ # compatibility and a consistent mechanism for selecting the
+ # object file format.
+ #
+ # Note: NetBSD doesn't particularly care about the vendor
+ # portion of the name. We always set it to "unknown".
+ sysctl="sysctl -n hw.machine_arch"
+ UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \
+ "/sbin/$sysctl" 2>/dev/null || \
+ "/usr/sbin/$sysctl" 2>/dev/null || \
+ echo unknown)`
+ case "$UNAME_MACHINE_ARCH" in
+ armeb) machine=armeb-unknown ;;
+ arm*) machine=arm-unknown ;;
+ sh3el) machine=shl-unknown ;;
+ sh3eb) machine=sh-unknown ;;
+ sh5el) machine=sh5le-unknown ;;
+ earmv*)
+ arch=`echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,'`
+ endian=`echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p'`
+ machine="${arch}${endian}"-unknown
+ ;;
+ *) machine="$UNAME_MACHINE_ARCH"-unknown ;;
+ esac
+	# The operating system, including the object format, for machines
+	# that have switched to ELF recently (or will in the future); the
+	# ABI is handled below.
+ case "$UNAME_MACHINE_ARCH" in
+ earm*)
+ os=netbsdelf
+ ;;
+ arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+ eval "$set_cc_for_build"
+ if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ELF__
+ then
+				# Utilities here may be either ECOFF (netbsdecoff) or
+				# a.out (netbsdaout); return netbsd for either.  FIX?
+ os=netbsd
+ else
+ os=netbsdelf
+ fi
+ ;;
+ *)
+ os=netbsd
+ ;;
+ esac
+ # Determine ABI tags.
+ case "$UNAME_MACHINE_ARCH" in
+ earm*)
+ expr='s/^earmv[0-9]/-eabi/;s/eb$//'
+ abi=`echo "$UNAME_MACHINE_ARCH" | sed -e "$expr"`
+ ;;
+ esac
+ # The OS release
+ # Debian GNU/NetBSD machines have a different userland, and
+ # thus, need a distinct triplet. However, they do not need
+ # kernel version information, so it can be replaced with a
+ # suitable tag, in the style of linux-gnu.
+ case "$UNAME_VERSION" in
+ Debian*)
+ release='-gnu'
+ ;;
+ *)
+ release=`echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2`
+ ;;
+ esac
+ # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+ # contains redundant information, the shorter form:
+ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
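+	# For example (illustrative): an x86_64 NetBSD 9.0 host would print
+	# something like x86_64-unknown-netbsd9.0 here.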
+ echo "$machine-${os}${release}${abi}"
+ exit ;;
+ *:Bitrig:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'`
+ echo "$UNAME_MACHINE_ARCH"-unknown-bitrig"$UNAME_RELEASE"
+ exit ;;
+ *:OpenBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+ echo "$UNAME_MACHINE_ARCH"-unknown-openbsd"$UNAME_RELEASE"
+ exit ;;
+ *:LibertyBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'`
+ echo "$UNAME_MACHINE_ARCH"-unknown-libertybsd"$UNAME_RELEASE"
+ exit ;;
+ *:MidnightBSD:*:*)
+ echo "$UNAME_MACHINE"-unknown-midnightbsd"$UNAME_RELEASE"
+ exit ;;
+ *:ekkoBSD:*:*)
+ echo "$UNAME_MACHINE"-unknown-ekkobsd"$UNAME_RELEASE"
+ exit ;;
+ *:SolidBSD:*:*)
+ echo "$UNAME_MACHINE"-unknown-solidbsd"$UNAME_RELEASE"
+ exit ;;
+ macppc:MirBSD:*:*)
+ echo powerpc-unknown-mirbsd"$UNAME_RELEASE"
+ exit ;;
+ *:MirBSD:*:*)
+ echo "$UNAME_MACHINE"-unknown-mirbsd"$UNAME_RELEASE"
+ exit ;;
+ *:Sortix:*:*)
+ echo "$UNAME_MACHINE"-unknown-sortix
+ exit ;;
+ *:Redox:*:*)
+ echo "$UNAME_MACHINE"-unknown-redox
+ exit ;;
+ mips:OSF1:*.*)
+ echo mips-dec-osf1
+ exit ;;
+ alpha:OSF1:*:*)
+ case $UNAME_RELEASE in
+ *4.0)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+ ;;
+ *5.*)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+ ;;
+ esac
+ # According to Compaq, /usr/sbin/psrinfo has been available on
+ # OSF/1 and Tru64 systems produced since 1995. I hope that
+ # covers most systems running today. This code pipes the CPU
+ # types through head -n 1, so we only detect the type of CPU 0.
+ ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+ case "$ALPHA_CPU_TYPE" in
+ "EV4 (21064)")
+ UNAME_MACHINE=alpha ;;
+ "EV4.5 (21064)")
+ UNAME_MACHINE=alpha ;;
+ "LCA4 (21066/21068)")
+ UNAME_MACHINE=alpha ;;
+ "EV5 (21164)")
+ UNAME_MACHINE=alphaev5 ;;
+ "EV5.6 (21164A)")
+ UNAME_MACHINE=alphaev56 ;;
+ "EV5.6 (21164PC)")
+ UNAME_MACHINE=alphapca56 ;;
+ "EV5.7 (21164PC)")
+ UNAME_MACHINE=alphapca57 ;;
+ "EV6 (21264)")
+ UNAME_MACHINE=alphaev6 ;;
+ "EV6.7 (21264A)")
+ UNAME_MACHINE=alphaev67 ;;
+ "EV6.8CB (21264C)")
+ UNAME_MACHINE=alphaev68 ;;
+ "EV6.8AL (21264B)")
+ UNAME_MACHINE=alphaev68 ;;
+ "EV6.8CX (21264D)")
+ UNAME_MACHINE=alphaev68 ;;
+ "EV6.9A (21264/EV69A)")
+ UNAME_MACHINE=alphaev69 ;;
+ "EV7 (21364)")
+ UNAME_MACHINE=alphaev7 ;;
+ "EV7.9 (21364A)")
+ UNAME_MACHINE=alphaev79 ;;
+ esac
+ # A Pn.n version is a patched version.
+ # A Vn.n version is a released version.
+ # A Tn.n version is a released field test version.
+ # A Xn.n version is an unreleased experimental baselevel.
+ # 1.2 uses "1.2" for uname -r.
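+	# For example (illustrative): a release of "V5.1" is reduced to "5.1",
+	# giving output such as alphaev6-dec-osf5.1.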
+ echo "$UNAME_MACHINE"-dec-osf"`echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`"
+ # Reset EXIT trap before exiting to avoid spurious non-zero exit code.
+ exitcode=$?
+ trap '' 0
+ exit $exitcode ;;
+ Amiga*:UNIX_System_V:4.0:*)
+ echo m68k-unknown-sysv4
+ exit ;;
+ *:[Aa]miga[Oo][Ss]:*:*)
+ echo "$UNAME_MACHINE"-unknown-amigaos
+ exit ;;
+ *:[Mm]orph[Oo][Ss]:*:*)
+ echo "$UNAME_MACHINE"-unknown-morphos
+ exit ;;
+ *:OS/390:*:*)
+ echo i370-ibm-openedition
+ exit ;;
+ *:z/VM:*:*)
+ echo s390-ibm-zvmoe
+ exit ;;
+ *:OS400:*:*)
+ echo powerpc-ibm-os400
+ exit ;;
+ arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+ echo arm-acorn-riscix"$UNAME_RELEASE"
+ exit ;;
+ arm*:riscos:*:*|arm*:RISCOS:*:*)
+ echo arm-unknown-riscos
+ exit ;;
+ SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+ echo hppa1.1-hitachi-hiuxmpp
+ exit ;;
+ Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+ # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+ if test "`(/bin/universe) 2>/dev/null`" = att ; then
+ echo pyramid-pyramid-sysv3
+ else
+ echo pyramid-pyramid-bsd
+ fi
+ exit ;;
+ NILE*:*:*:dcosx)
+ echo pyramid-pyramid-svr4
+ exit ;;
+ DRS?6000:unix:4.0:6*)
+ echo sparc-icl-nx6
+ exit ;;
+ DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+ case `/usr/bin/uname -p` in
+ sparc) echo sparc-icl-nx7; exit ;;
+ esac ;;
+ s390x:SunOS:*:*)
+ echo "$UNAME_MACHINE"-ibm-solaris2"`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`"
+ exit ;;
+ sun4H:SunOS:5.*:*)
+ echo sparc-hal-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`"
+ exit ;;
+ sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+ echo sparc-sun-solaris2"`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`"
+ exit ;;
+ i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+ echo i386-pc-auroraux"$UNAME_RELEASE"
+ exit ;;
+ i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+ eval "$set_cc_for_build"
+ SUN_ARCH=i386
+ # If there is a compiler, see if it is configured for 64-bit objects.
+ # Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+ # This test works for both compilers.
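+	# For example (illustrative): on 64-bit Solaris/x86 the preprocessor
+	# keeps IS_64BIT_ARCH, so SUN_ARCH becomes x86_64 and the result is
+	# something like x86_64-pc-solaris2.11.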
+ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then
+ if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+ SUN_ARCH=x86_64
+ fi
+ fi
+ echo "$SUN_ARCH"-pc-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`"
+ exit ;;
+ sun4*:SunOS:6*:*)
+ # According to config.sub, this is the proper way to canonicalize
+ # SunOS6. Hard to guess exactly what SunOS6 will be like, but
+ # it's likely to be more like Solaris than SunOS4.
+ echo sparc-sun-solaris3"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`"
+ exit ;;
+ sun4*:SunOS:*:*)
+ case "`/usr/bin/arch -k`" in
+ Series*|S4*)
+ UNAME_RELEASE=`uname -v`
+ ;;
+ esac
+ # Japanese Language versions have a version number like `4.1.3-JL'.
+ echo sparc-sun-sunos"`echo "$UNAME_RELEASE"|sed -e 's/-/_/'`"
+ exit ;;
+ sun3*:SunOS:*:*)
+ echo m68k-sun-sunos"$UNAME_RELEASE"
+ exit ;;
+ sun*:*:4.2BSD:*)
+ UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+ test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3
+ case "`/bin/arch`" in
+ sun3)
+ echo m68k-sun-sunos"$UNAME_RELEASE"
+ ;;
+ sun4)
+ echo sparc-sun-sunos"$UNAME_RELEASE"
+ ;;
+ esac
+ exit ;;
+ aushp:SunOS:*:*)
+ echo sparc-auspex-sunos"$UNAME_RELEASE"
+ exit ;;
+ # The situation for MiNT is a little confusing. The machine name
+    # can be virtually anything (everything which is not
+    # "atarist" or "atariste" should at least have a processor
+    # > m68000).  The system name ranges from "MiNT" through "FreeMiNT"
+    # to the lowercase versions "mint" and "freemint".  Finally,
+    # the system name "TOS" denotes a system which is actually not
+    # MiNT.  But MiNT is downward compatible with TOS, so this should
+    # be no problem.
+ atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+ echo m68k-atari-mint"$UNAME_RELEASE"
+ exit ;;
+ atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+ echo m68k-atari-mint"$UNAME_RELEASE"
+ exit ;;
+ *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+ echo m68k-atari-mint"$UNAME_RELEASE"
+ exit ;;
+ milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+ echo m68k-milan-mint"$UNAME_RELEASE"
+ exit ;;
+ hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+ echo m68k-hades-mint"$UNAME_RELEASE"
+ exit ;;
+ *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+ echo m68k-unknown-mint"$UNAME_RELEASE"
+ exit ;;
+ m68k:machten:*:*)
+ echo m68k-apple-machten"$UNAME_RELEASE"
+ exit ;;
+ powerpc:machten:*:*)
+ echo powerpc-apple-machten"$UNAME_RELEASE"
+ exit ;;
+ RISC*:Mach:*:*)
+ echo mips-dec-mach_bsd4.3
+ exit ;;
+ RISC*:ULTRIX:*:*)
+ echo mips-dec-ultrix"$UNAME_RELEASE"
+ exit ;;
+ VAX*:ULTRIX*:*:*)
+ echo vax-dec-ultrix"$UNAME_RELEASE"
+ exit ;;
+ 2020:CLIX:*:* | 2430:CLIX:*:*)
+ echo clipper-intergraph-clix"$UNAME_RELEASE"
+ exit ;;
+ mips:*:*:UMIPS | mips:*:*:RISCos)
+ eval "$set_cc_for_build"
+ sed 's/^ //' << EOF > "$dummy.c"
+#ifdef __cplusplus
+#include <stdio.h> /* for printf() prototype */
+ int main (int argc, char *argv[]) {
+#else
+ int main (argc, argv) int argc; char *argv[]; {
+#endif
+ #if defined (host_mips) && defined (MIPSEB)
+ #if defined (SYSTYPE_SYSV)
+ printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0);
+ #endif
+ #if defined (SYSTYPE_SVR4)
+ printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0);
+ #endif
+ #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+ printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0);
+ #endif
+ #endif
+ exit (-1);
+ }
+EOF
+ $CC_FOR_BUILD -o "$dummy" "$dummy.c" &&
+ dummyarg=`echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+ SYSTEM_NAME=`"$dummy" "$dummyarg"` &&
+ { echo "$SYSTEM_NAME"; exit; }
+ echo mips-mips-riscos"$UNAME_RELEASE"
+ exit ;;
+ Motorola:PowerMAX_OS:*:*)
+ echo powerpc-motorola-powermax
+ exit ;;
+ Motorola:*:4.3:PL8-*)
+ echo powerpc-harris-powermax
+ exit ;;
+ Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+ echo powerpc-harris-powermax
+ exit ;;
+ Night_Hawk:Power_UNIX:*:*)
+ echo powerpc-harris-powerunix
+ exit ;;
+ m88k:CX/UX:7*:*)
+ echo m88k-harris-cxux7
+ exit ;;
+ m88k:*:4*:R4*)
+ echo m88k-motorola-sysv4
+ exit ;;
+ m88k:*:3*:R3*)
+ echo m88k-motorola-sysv3
+ exit ;;
+ AViiON:dgux:*:*)
+ # DG/UX returns AViiON for all architectures
+ UNAME_PROCESSOR=`/usr/bin/uname -p`
+ if [ "$UNAME_PROCESSOR" = mc88100 ] || [ "$UNAME_PROCESSOR" = mc88110 ]
+ then
+ if [ "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx ] || \
+ [ "$TARGET_BINARY_INTERFACE"x = x ]
+ then
+ echo m88k-dg-dgux"$UNAME_RELEASE"
+ else
+ echo m88k-dg-dguxbcs"$UNAME_RELEASE"
+ fi
+ else
+ echo i586-dg-dgux"$UNAME_RELEASE"
+ fi
+ exit ;;
+ M88*:DolphinOS:*:*) # DolphinOS (SVR3)
+ echo m88k-dolphin-sysv3
+ exit ;;
+ M88*:*:R3*:*)
+ # Delta 88k system running SVR3
+ echo m88k-motorola-sysv3
+ exit ;;
+ XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+ echo m88k-tektronix-sysv3
+ exit ;;
+ Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+ echo m68k-tektronix-bsd
+ exit ;;
+ *:IRIX*:*:*)
+ echo mips-sgi-irix"`echo "$UNAME_RELEASE"|sed -e 's/-/_/g'`"
+ exit ;;
+ ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+ echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id
+ exit ;; # Note that: echo "'`uname -s`'" gives 'AIX '
+ i*86:AIX:*:*)
+ echo i386-ibm-aix
+ exit ;;
+ ia64:AIX:*:*)
+ if [ -x /usr/bin/oslevel ] ; then
+ IBM_REV=`/usr/bin/oslevel`
+ else
+ IBM_REV="$UNAME_VERSION.$UNAME_RELEASE"
+ fi
+ echo "$UNAME_MACHINE"-ibm-aix"$IBM_REV"
+ exit ;;
+ *:AIX:2:3)
+ if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+ eval "$set_cc_for_build"
+ sed 's/^ //' << EOF > "$dummy.c"
+ #include <sys/systemcfg.h>
+
+ main()
+ {
+ if (!__power_pc())
+ exit(1);
+ puts("powerpc-ibm-aix3.2.5");
+ exit(0);
+ }
+EOF
+ if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"`
+ then
+ echo "$SYSTEM_NAME"
+ else
+ echo rs6000-ibm-aix3.2.5
+ fi
+ elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+ echo rs6000-ibm-aix3.2.4
+ else
+ echo rs6000-ibm-aix3.2
+ fi
+ exit ;;
+ *:AIX:*:[4567])
+ IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+ if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then
+ IBM_ARCH=rs6000
+ else
+ IBM_ARCH=powerpc
+ fi
+ if [ -x /usr/bin/lslpp ] ; then
+ IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc |
+ awk -F: '{ print $3 }' | sed s/[0-9]*$/0/`
+ else
+ IBM_REV="$UNAME_VERSION.$UNAME_RELEASE"
+ fi
+ echo "$IBM_ARCH"-ibm-aix"$IBM_REV"
+ exit ;;
+ *:AIX:*:*)
+ echo rs6000-ibm-aix
+ exit ;;
+ ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*)
+ echo romp-ibm-bsd4.4
+ exit ;;
+ ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
+ echo romp-ibm-bsd"$UNAME_RELEASE" # 4.3 with uname added to
+ exit ;; # report: romp-ibm BSD 4.3
+ *:BOSX:*:*)
+ echo rs6000-bull-bosx
+ exit ;;
+ DPX/2?00:B.O.S.:*:*)
+ echo m68k-bull-sysv3
+ exit ;;
+ 9000/[34]??:4.3bsd:1.*:*)
+ echo m68k-hp-bsd
+ exit ;;
+ hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+ echo m68k-hp-bsd4.4
+ exit ;;
+ 9000/[34678]??:HP-UX:*:*)
+ HPUX_REV=`echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//'`
+ case "$UNAME_MACHINE" in
+ 9000/31?) HP_ARCH=m68000 ;;
+ 9000/[34]??) HP_ARCH=m68k ;;
+ 9000/[678][0-9][0-9])
+ if [ -x /usr/bin/getconf ]; then
+ sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+ sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+ case "$sc_cpu_version" in
+ 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0
+ 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1
+ 532) # CPU_PA_RISC2_0
+ case "$sc_kernel_bits" in
+ 32) HP_ARCH=hppa2.0n ;;
+ 64) HP_ARCH=hppa2.0w ;;
+ '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20
+ esac ;;
+ esac
+ fi
+ if [ "$HP_ARCH" = "" ]; then
+ eval "$set_cc_for_build"
+ sed 's/^ //' << EOF > "$dummy.c"
+
+ #define _HPUX_SOURCE
+ #include <stdlib.h>
+ #include <unistd.h>
+
+ int main ()
+ {
+ #if defined(_SC_KERNEL_BITS)
+ long bits = sysconf(_SC_KERNEL_BITS);
+ #endif
+ long cpu = sysconf (_SC_CPU_VERSION);
+
+ switch (cpu)
+ {
+ case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+ case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+ case CPU_PA_RISC2_0:
+ #if defined(_SC_KERNEL_BITS)
+ switch (bits)
+ {
+ case 64: puts ("hppa2.0w"); break;
+ case 32: puts ("hppa2.0n"); break;
+ default: puts ("hppa2.0"); break;
+ } break;
+ #else /* !defined(_SC_KERNEL_BITS) */
+ puts ("hppa2.0"); break;
+ #endif
+ default: puts ("hppa1.0"); break;
+ }
+ exit (0);
+ }
+EOF
+ (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=`"$dummy"`
+ test -z "$HP_ARCH" && HP_ARCH=hppa
+ fi ;;
+ esac
+ if [ "$HP_ARCH" = hppa2.0w ]
+ then
+ eval "$set_cc_for_build"
+
+ # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+ # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler
+ # generating 64-bit code. GNU and HP use different nomenclature:
+ #
+ # $ CC_FOR_BUILD=cc ./config.guess
+ # => hppa2.0w-hp-hpux11.23
+ # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+ # => hppa64-hp-hpux11.23
+
+ if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) |
+ grep -q __LP64__
+ then
+ HP_ARCH=hppa2.0w
+ else
+ HP_ARCH=hppa64
+ fi
+ fi
+ echo "$HP_ARCH"-hp-hpux"$HPUX_REV"
+ exit ;;
+ ia64:HP-UX:*:*)
+ HPUX_REV=`echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//'`
+ echo ia64-hp-hpux"$HPUX_REV"
+ exit ;;
+ 3050*:HI-UX:*:*)
+ eval "$set_cc_for_build"
+ sed 's/^ //' << EOF > "$dummy.c"
+ #include <unistd.h>
+ int
+ main ()
+ {
+ long cpu = sysconf (_SC_CPU_VERSION);
+ /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+ true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct
+ results, however. */
+ if (CPU_IS_PA_RISC (cpu))
+ {
+ switch (cpu)
+ {
+ case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+ case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+ case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+ default: puts ("hppa-hitachi-hiuxwe2"); break;
+ }
+ }
+ else if (CPU_IS_HP_MC68K (cpu))
+ puts ("m68k-hitachi-hiuxwe2");
+ else puts ("unknown-hitachi-hiuxwe2");
+ exit (0);
+ }
+EOF
+ $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` &&
+ { echo "$SYSTEM_NAME"; exit; }
+ echo unknown-hitachi-hiuxwe2
+ exit ;;
+ 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*)
+ echo hppa1.1-hp-bsd
+ exit ;;
+ 9000/8??:4.3bsd:*:*)
+ echo hppa1.0-hp-bsd
+ exit ;;
+ *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+ echo hppa1.0-hp-mpeix
+ exit ;;
+ hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*)
+ echo hppa1.1-hp-osf
+ exit ;;
+ hp8??:OSF1:*:*)
+ echo hppa1.0-hp-osf
+ exit ;;
+ i*86:OSF1:*:*)
+ if [ -x /usr/sbin/sysversion ] ; then
+ echo "$UNAME_MACHINE"-unknown-osf1mk
+ else
+ echo "$UNAME_MACHINE"-unknown-osf1
+ fi
+ exit ;;
+ parisc*:Lites*:*:*)
+ echo hppa1.1-hp-lites
+ exit ;;
+ C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+ echo c1-convex-bsd
+ exit ;;
+ C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+ if getsysinfo -f scalar_acc
+ then echo c32-convex-bsd
+ else echo c2-convex-bsd
+ fi
+ exit ;;
+ C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+ echo c34-convex-bsd
+ exit ;;
+ C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+ echo c38-convex-bsd
+ exit ;;
+ C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+ echo c4-convex-bsd
+ exit ;;
+ CRAY*Y-MP:*:*:*)
+ echo ymp-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*[A-Z]90:*:*:*)
+ echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \
+ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+ -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*TS:*:*:*)
+ echo t90-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*T3E:*:*:*)
+ echo alphaev5-cray-unicosmk"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*SV1:*:*:*)
+ echo sv1-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ *:UNICOS/mp:*:*)
+ echo craynv-cray-unicosmp"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+ FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
+ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
+ FUJITSU_REL=`echo "$UNAME_RELEASE" | sed -e 's/ /_/'`
+ echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+ exit ;;
+ 5000:UNIX_System_V:4.*:*)
+ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
+ FUJITSU_REL=`echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'`
+ echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+ exit ;;
+ i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+ echo "$UNAME_MACHINE"-pc-bsdi"$UNAME_RELEASE"
+ exit ;;
+ sparc*:BSD/OS:*:*)
+ echo sparc-unknown-bsdi"$UNAME_RELEASE"
+ exit ;;
+ *:BSD/OS:*:*)
+ echo "$UNAME_MACHINE"-unknown-bsdi"$UNAME_RELEASE"
+ exit ;;
+ *:FreeBSD:*:*)
+ UNAME_PROCESSOR=`/usr/bin/uname -p`
+ case "$UNAME_PROCESSOR" in
+ amd64)
+ UNAME_PROCESSOR=x86_64 ;;
+ i386)
+ UNAME_PROCESSOR=i586 ;;
+ esac
+ echo "$UNAME_PROCESSOR"-unknown-freebsd"`echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`"
+ exit ;;
+ i*:CYGWIN*:*)
+ echo "$UNAME_MACHINE"-pc-cygwin
+ exit ;;
+ *:MINGW64*:*)
+ echo "$UNAME_MACHINE"-pc-mingw64
+ exit ;;
+ *:MINGW*:*)
+ echo "$UNAME_MACHINE"-pc-mingw32
+ exit ;;
+ *:MSYS*:*)
+ echo "$UNAME_MACHINE"-pc-msys
+ exit ;;
+ i*:PW*:*)
+ echo "$UNAME_MACHINE"-pc-pw32
+ exit ;;
+ *:Interix*:*)
+ case "$UNAME_MACHINE" in
+ x86)
+ echo i586-pc-interix"$UNAME_RELEASE"
+ exit ;;
+ authenticamd | genuineintel | EM64T)
+ echo x86_64-unknown-interix"$UNAME_RELEASE"
+ exit ;;
+ IA64)
+ echo ia64-unknown-interix"$UNAME_RELEASE"
+ exit ;;
+ esac ;;
+ i*:UWIN*:*)
+ echo "$UNAME_MACHINE"-pc-uwin
+ exit ;;
+ amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+ echo x86_64-unknown-cygwin
+ exit ;;
+ prep*:SunOS:5.*:*)
+ echo powerpcle-unknown-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`"
+ exit ;;
+ *:GNU:*:*)
+ # the GNU system
+ echo "`echo "$UNAME_MACHINE"|sed -e 's,[-/].*$,,'`-unknown-$LIBC`echo "$UNAME_RELEASE"|sed -e 's,/.*$,,'`"
+ exit ;;
+ *:GNU/*:*:*)
+ # other systems with GNU libc and userland
+ echo "$UNAME_MACHINE-unknown-`echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`-$LIBC"
+ exit ;;
+ i*86:Minix:*:*)
+ echo "$UNAME_MACHINE"-pc-minix
+ exit ;;
+ aarch64:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ aarch64_be:Linux:*:*)
+ UNAME_MACHINE=aarch64_be
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ alpha:Linux:*:*)
+ case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+ EV5) UNAME_MACHINE=alphaev5 ;;
+ EV56) UNAME_MACHINE=alphaev56 ;;
+ PCA56) UNAME_MACHINE=alphapca56 ;;
+ PCA57) UNAME_MACHINE=alphapca56 ;;
+ EV6) UNAME_MACHINE=alphaev6 ;;
+ EV67) UNAME_MACHINE=alphaev67 ;;
+ EV68*) UNAME_MACHINE=alphaev68 ;;
+ esac
+ objdump --private-headers /bin/sh | grep -q ld.so.1
+ if test "$?" = 0 ; then LIBC=gnulibc1 ; fi
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ arc:Linux:*:* | arceb:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ arm*:Linux:*:*)
+ eval "$set_cc_for_build"
+ if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_EABI__
+ then
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ else
+ if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_PCS_VFP
+ then
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabi
+ else
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabihf
+ fi
+ fi
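+	    # For example (illustrative): a hard-float EABI compiler defines
+	    # both __ARM_EABI__ and __ARM_PCS_VFP, so neither token survives
+	    # preprocessing and the result ends in eabihf
+	    # (e.g. armv7l-unknown-linux-gnueabihf).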
+ exit ;;
+ avr32*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ cris:Linux:*:*)
+ echo "$UNAME_MACHINE"-axis-linux-"$LIBC"
+ exit ;;
+ crisv32:Linux:*:*)
+ echo "$UNAME_MACHINE"-axis-linux-"$LIBC"
+ exit ;;
+ e2k:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ frv:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ hexagon:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ i*86:Linux:*:*)
+ echo "$UNAME_MACHINE"-pc-linux-"$LIBC"
+ exit ;;
+ ia64:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ k1om:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ m32r*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ m68*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ mips:Linux:*:* | mips64:Linux:*:*)
+ eval "$set_cc_for_build"
+ sed 's/^ //' << EOF > "$dummy.c"
+ #undef CPU
+ #undef ${UNAME_MACHINE}
+ #undef ${UNAME_MACHINE}el
+ #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+ CPU=${UNAME_MACHINE}el
+ #else
+ #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+ CPU=${UNAME_MACHINE}
+ #else
+ CPU=
+ #endif
+ #endif
+EOF
+ eval "`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU'`"
+ test "x$CPU" != x && { echo "$CPU-unknown-linux-$LIBC"; exit; }
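+	# For example (illustrative): a little-endian compiler defines
+	# __MIPSEL__, so CPU expands to ${UNAME_MACHINE}el above and the
+	# result is something like mipsel-unknown-linux-gnu.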
+ ;;
+ mips64el:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ openrisc*:Linux:*:*)
+ echo or1k-unknown-linux-"$LIBC"
+ exit ;;
+ or32:Linux:*:* | or1k*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ padre:Linux:*:*)
+ echo sparc-unknown-linux-"$LIBC"
+ exit ;;
+ parisc64:Linux:*:* | hppa64:Linux:*:*)
+ echo hppa64-unknown-linux-"$LIBC"
+ exit ;;
+ parisc:Linux:*:* | hppa:Linux:*:*)
+ # Look for CPU level
+ case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+ PA7*) echo hppa1.1-unknown-linux-"$LIBC" ;;
+ PA8*) echo hppa2.0-unknown-linux-"$LIBC" ;;
+ *) echo hppa-unknown-linux-"$LIBC" ;;
+ esac
+ exit ;;
+ ppc64:Linux:*:*)
+ echo powerpc64-unknown-linux-"$LIBC"
+ exit ;;
+ ppc:Linux:*:*)
+ echo powerpc-unknown-linux-"$LIBC"
+ exit ;;
+ ppc64le:Linux:*:*)
+ echo powerpc64le-unknown-linux-"$LIBC"
+ exit ;;
+ ppcle:Linux:*:*)
+ echo powerpcle-unknown-linux-"$LIBC"
+ exit ;;
+ riscv32:Linux:*:* | riscv64:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ s390:Linux:*:* | s390x:Linux:*:*)
+ echo "$UNAME_MACHINE"-ibm-linux-"$LIBC"
+ exit ;;
+ sh64*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ sh*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ sparc:Linux:*:* | sparc64:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ tile*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ vax:Linux:*:*)
+ echo "$UNAME_MACHINE"-dec-linux-"$LIBC"
+ exit ;;
+ x86_64:Linux:*:*)
+ echo "$UNAME_MACHINE"-pc-linux-"$LIBC"
+ exit ;;
+ xtensa*:Linux:*:*)
+ echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
+ exit ;;
+ i*86:DYNIX/ptx:4*:*)
+ # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+ # earlier versions are messed up and put the nodename in both
+ # sysname and nodename.
+ echo i386-sequent-sysv4
+ exit ;;
+ i*86:UNIX_SV:4.2MP:2.*)
+ # Unixware is an offshoot of SVR4, but it has its own version
+ # number series starting with 2...
+ # I am not positive that other SVR4 systems won't match this,
+ # I just have to hope. -- rms.
+ # Use sysv4.2uw... so that sysv4* matches it.
+ echo "$UNAME_MACHINE"-pc-sysv4.2uw"$UNAME_VERSION"
+ exit ;;
+ i*86:OS/2:*:*)
+ # If we were able to find `uname', then EMX Unix compatibility
+ # is probably installed.
+ echo "$UNAME_MACHINE"-pc-os2-emx
+ exit ;;
+ i*86:XTS-300:*:STOP)
+ echo "$UNAME_MACHINE"-unknown-stop
+ exit ;;
+ i*86:atheos:*:*)
+ echo "$UNAME_MACHINE"-unknown-atheos
+ exit ;;
+ i*86:syllable:*:*)
+ echo "$UNAME_MACHINE"-pc-syllable
+ exit ;;
+ i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+ echo i386-unknown-lynxos"$UNAME_RELEASE"
+ exit ;;
+ i*86:*DOS:*:*)
+ echo "$UNAME_MACHINE"-pc-msdosdjgpp
+ exit ;;
+ i*86:*:4.*:*)
+ UNAME_REL=`echo "$UNAME_RELEASE" | sed 's/\/MP$//'`
+ if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+ echo "$UNAME_MACHINE"-univel-sysv"$UNAME_REL"
+ else
+ echo "$UNAME_MACHINE"-pc-sysv"$UNAME_REL"
+ fi
+ exit ;;
+ i*86:*:5:[678]*)
+ # UnixWare 7.x, OpenUNIX and OpenServer 6.
+ case `/bin/uname -X | grep "^Machine"` in
+ *486*) UNAME_MACHINE=i486 ;;
+ *Pentium) UNAME_MACHINE=i586 ;;
+ *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+ esac
+		echo "$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}"
+ exit ;;
+ i*86:*:3.2:*)
+ if test -f /usr/options/cb.name; then
+ UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+ echo "$UNAME_MACHINE"-pc-isc"$UNAME_REL"
+ elif /bin/uname -X 2>/dev/null >/dev/null ; then
+ UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+ (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+ (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+ && UNAME_MACHINE=i586
+ (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+ && UNAME_MACHINE=i686
+ (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+ && UNAME_MACHINE=i686
+ echo "$UNAME_MACHINE"-pc-sco"$UNAME_REL"
+ else
+ echo "$UNAME_MACHINE"-pc-sysv32
+ fi
+ exit ;;
+ pc:*:*:*)
+ # Left here for compatibility:
+	# For DJGPP, uname -m always prints 'pc', but it prints nothing about
+ # the processor, so we play safe by assuming i586.
+ # Note: whatever this is, it MUST be the same as what config.sub
+ # prints for the "djgpp" host, or else GDB configure will decide that
+ # this is a cross-build.
+ echo i586-pc-msdosdjgpp
+ exit ;;
+ Intel:Mach:3*:*)
+ echo i386-pc-mach3
+ exit ;;
+ paragon:*:*:*)
+ echo i860-intel-osf1
+ exit ;;
+ i860:*:4.*:*) # i860-SVR4
+ if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+ echo i860-stardent-sysv"$UNAME_RELEASE" # Stardent Vistra i860-SVR4
+ else # Add other i860-SVR4 vendors below as they are discovered.
+ echo i860-unknown-sysv"$UNAME_RELEASE" # Unknown i860-SVR4
+ fi
+ exit ;;
+ mini*:CTIX:SYS*5:*)
+ # "miniframe"
+ echo m68010-convergent-sysv
+ exit ;;
+ mc68k:UNIX:SYSTEM5:3.51m)
+ echo m68k-convergent-sysv
+ exit ;;
+ M680?0:D-NIX:5.3:*)
+ echo m68k-diab-dnix
+ exit ;;
+ M68*:*:R3V[5678]*:*)
+ test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+ 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+ OS_REL=''
+ test -r /etc/.relid \
+ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
+ 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4; exit; } ;;
+ NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+ OS_REL='.3'
+ test -r /etc/.relid \
+ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3"$OS_REL"; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+ && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
+ m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+ echo m68k-unknown-lynxos"$UNAME_RELEASE"
+ exit ;;
+ mc68030:UNIX_System_V:4.*:*)
+ echo m68k-atari-sysv4
+ exit ;;
+ TSUNAMI:LynxOS:2.*:*)
+ echo sparc-unknown-lynxos"$UNAME_RELEASE"
+ exit ;;
+ rs6000:LynxOS:2.*:*)
+ echo rs6000-unknown-lynxos"$UNAME_RELEASE"
+ exit ;;
+ PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+ echo powerpc-unknown-lynxos"$UNAME_RELEASE"
+ exit ;;
+ SM[BE]S:UNIX_SV:*:*)
+ echo mips-dde-sysv"$UNAME_RELEASE"
+ exit ;;
+ RM*:ReliantUNIX-*:*:*)
+ echo mips-sni-sysv4
+ exit ;;
+ RM*:SINIX-*:*:*)
+ echo mips-sni-sysv4
+ exit ;;
+ *:SINIX-*:*:*)
+ if uname -p 2>/dev/null >/dev/null ; then
+ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ echo "$UNAME_MACHINE"-sni-sysv4
+ else
+ echo ns32k-sni-sysv
+ fi
+ exit ;;
+ PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+ # says <Richard.M.Bartel@ccMail.Census.GOV>
+ echo i586-unisys-sysv4
+ exit ;;
+ *:UNIX_System_V:4*:FTX*)
+ # From Gerald Hewes <hewes@openmarket.com>.
+ # How about differentiating between stratus architectures? -djm
+ echo hppa1.1-stratus-sysv4
+ exit ;;
+ *:*:*:FTX*)
+ # From seanf@swdc.stratus.com.
+ echo i860-stratus-sysv4
+ exit ;;
+ i*86:VOS:*:*)
+ # From Paul.Green@stratus.com.
+ echo "$UNAME_MACHINE"-stratus-vos
+ exit ;;
+ *:VOS:*:*)
+ # From Paul.Green@stratus.com.
+ echo hppa1.1-stratus-vos
+ exit ;;
+ mc68*:A/UX:*:*)
+ echo m68k-apple-aux"$UNAME_RELEASE"
+ exit ;;
+ news*:NEWS-OS:6*:*)
+ echo mips-sony-newsos6
+ exit ;;
+ R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+ if [ -d /usr/nec ]; then
+ echo mips-nec-sysv"$UNAME_RELEASE"
+ else
+ echo mips-unknown-sysv"$UNAME_RELEASE"
+ fi
+ exit ;;
+ BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only.
+ echo powerpc-be-beos
+ exit ;;
+ BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only.
+ echo powerpc-apple-beos
+ exit ;;
+ BePC:BeOS:*:*) # BeOS running on Intel PC compatible.
+ echo i586-pc-beos
+ exit ;;
+ BePC:Haiku:*:*) # Haiku running on Intel PC compatible.
+ echo i586-pc-haiku
+ exit ;;
+ x86_64:Haiku:*:*)
+ echo x86_64-unknown-haiku
+ exit ;;
+ SX-4:SUPER-UX:*:*)
+ echo sx4-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ SX-5:SUPER-UX:*:*)
+ echo sx5-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ SX-6:SUPER-UX:*:*)
+ echo sx6-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ SX-7:SUPER-UX:*:*)
+ echo sx7-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ SX-8:SUPER-UX:*:*)
+ echo sx8-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ SX-8R:SUPER-UX:*:*)
+ echo sx8r-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ SX-ACE:SUPER-UX:*:*)
+ echo sxace-nec-superux"$UNAME_RELEASE"
+ exit ;;
+ Power*:Rhapsody:*:*)
+ echo powerpc-apple-rhapsody"$UNAME_RELEASE"
+ exit ;;
+ *:Rhapsody:*:*)
+ echo "$UNAME_MACHINE"-apple-rhapsody"$UNAME_RELEASE"
+ exit ;;
+ *:Darwin:*:*)
+ UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+ eval "$set_cc_for_build"
+ if test "$UNAME_PROCESSOR" = unknown ; then
+ UNAME_PROCESSOR=powerpc
+ fi
+ if test "`echo "$UNAME_RELEASE" | sed -e 's/\..*//'`" -le 10 ; then
+ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then
+ if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+ case $UNAME_PROCESSOR in
+ i386) UNAME_PROCESSOR=x86_64 ;;
+ powerpc) UNAME_PROCESSOR=powerpc64 ;;
+ esac
+ fi
+ # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc
+ if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \
+ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_PPC >/dev/null
+ then
+ UNAME_PROCESSOR=powerpc
+ fi
+ fi
+ elif test "$UNAME_PROCESSOR" = i386 ; then
+ # Avoid executing cc on OS X 10.9, as it ships with a stub
+ # that puts up a graphical alert prompting to install
+ # developer tools. Any system running Mac OS X 10.7 or
+ # later (Darwin 11 and later) is required to have a 64-bit
+ # processor. This is not true of the ARM version of Darwin
+ # that Apple uses in portable devices.
+ UNAME_PROCESSOR=x86_64
+ fi
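+	# For example (illustrative): a 64-bit Intel Mac running Darwin 17
+	# reports i386 from `uname -p`, is promoted to x86_64 above, and
+	# prints something like x86_64-apple-darwin17.7.0.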
+ echo "$UNAME_PROCESSOR"-apple-darwin"$UNAME_RELEASE"
+ exit ;;
+ *:procnto*:*:* | *:QNX:[0123456789]*:*)
+ UNAME_PROCESSOR=`uname -p`
+ if test "$UNAME_PROCESSOR" = x86; then
+ UNAME_PROCESSOR=i386
+ UNAME_MACHINE=pc
+ fi
+ echo "$UNAME_PROCESSOR"-"$UNAME_MACHINE"-nto-qnx"$UNAME_RELEASE"
+ exit ;;
+ *:QNX:*:4*)
+ echo i386-pc-qnx
+ exit ;;
+ NEO-*:NONSTOP_KERNEL:*:*)
+ echo neo-tandem-nsk"$UNAME_RELEASE"
+ exit ;;
+ NSE-*:NONSTOP_KERNEL:*:*)
+ echo nse-tandem-nsk"$UNAME_RELEASE"
+ exit ;;
+ NSR-*:NONSTOP_KERNEL:*:*)
+ echo nsr-tandem-nsk"$UNAME_RELEASE"
+ exit ;;
+ NSV-*:NONSTOP_KERNEL:*:*)
+ echo nsv-tandem-nsk"$UNAME_RELEASE"
+ exit ;;
+ NSX-*:NONSTOP_KERNEL:*:*)
+ echo nsx-tandem-nsk"$UNAME_RELEASE"
+ exit ;;
+ *:NonStop-UX:*:*)
+ echo mips-compaq-nonstopux
+ exit ;;
+ BS2000:POSIX*:*:*)
+ echo bs2000-siemens-sysv
+ exit ;;
+ DS/*:UNIX_System_V:*:*)
+ echo "$UNAME_MACHINE"-"$UNAME_SYSTEM"-"$UNAME_RELEASE"
+ exit ;;
+ *:Plan9:*:*)
+ # "uname -m" is not consistent, so use $cputype instead. 386
+ # is converted to i386 for consistency with other x86
+ # operating systems.
+ if test "$cputype" = 386; then
+ UNAME_MACHINE=i386
+ else
+ UNAME_MACHINE="$cputype"
+ fi
+ echo "$UNAME_MACHINE"-unknown-plan9
+ exit ;;
+ *:TOPS-10:*:*)
+ echo pdp10-unknown-tops10
+ exit ;;
+ *:TENEX:*:*)
+ echo pdp10-unknown-tenex
+ exit ;;
+ KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+ echo pdp10-dec-tops20
+ exit ;;
+ XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+ echo pdp10-xkl-tops20
+ exit ;;
+ *:TOPS-20:*:*)
+ echo pdp10-unknown-tops20
+ exit ;;
+ *:ITS:*:*)
+ echo pdp10-unknown-its
+ exit ;;
+ SEI:*:*:SEIUX)
+ echo mips-sei-seiux"$UNAME_RELEASE"
+ exit ;;
+ *:DragonFly:*:*)
+ echo "$UNAME_MACHINE"-unknown-dragonfly"`echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`"
+ exit ;;
+ *:*VMS:*:*)
+ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ case "$UNAME_MACHINE" in
+ A*) echo alpha-dec-vms ; exit ;;
+ I*) echo ia64-dec-vms ; exit ;;
+ V*) echo vax-dec-vms ; exit ;;
+ esac ;;
+ *:XENIX:*:SysV)
+ echo i386-pc-xenix
+ exit ;;
+ i*86:skyos:*:*)
+ echo "$UNAME_MACHINE"-pc-skyos"`echo "$UNAME_RELEASE" | sed -e 's/ .*$//'`"
+ exit ;;
+ i*86:rdos:*:*)
+ echo "$UNAME_MACHINE"-pc-rdos
+ exit ;;
+ i*86:AROS:*:*)
+ echo "$UNAME_MACHINE"-pc-aros
+ exit ;;
+ x86_64:VMkernel:*:*)
+ echo "$UNAME_MACHINE"-unknown-esx
+ exit ;;
+ amd64:Isilon\ OneFS:*:*)
+ echo x86_64-unknown-onefs
+ exit ;;
+esac
+
+echo "$0: unable to guess system type" >&2
+
+case "$UNAME_MACHINE:$UNAME_SYSTEM" in
+ mips:Linux | mips64:Linux)
+ # If we got here on MIPS GNU/Linux, output extra information.
+ cat >&2 <<EOF
+
+NOTE: MIPS GNU/Linux systems require a C compiler to fully recognize
+the system type. Please install a C compiler and try again.
+EOF
+ ;;
+esac
+
+cat >&2 <<EOF
+
+This script (version $timestamp) has failed to recognize the
+operating system you are using. If your script is old, overwrite *all*
+copies of config.guess and config.sub with the latest versions from:
+
+ https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess
+and
+ https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub
+
+If $0 has already been updated, send the following data and any
+other information you think might be pertinent to config-patches@gnu.org
+so that the script can be taught to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo = `(hostinfo) 2>/dev/null`
+/bin/universe = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = "$UNAME_MACHINE"
+UNAME_RELEASE = "$UNAME_RELEASE"
+UNAME_SYSTEM = "$UNAME_SYSTEM"
+UNAME_VERSION = "$UNAME_VERSION"
+EOF
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/libcody/build-aux/config.sub b/libcody/build-aux/config.sub
new file mode 100755
index 0000000..20f7cf2
--- /dev/null
+++ b/libcody/build-aux/config.sub
@@ -0,0 +1,1833 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+# Copyright 1992-2018 Free Software Foundation, Inc.
+
+timestamp='2018-05-05'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program. This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+
+
+# Please send patches to <config-patches@gnu.org>.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support. The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
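+# For example (illustrative): an alias such as `amd64-linux' is expanded
+# below into the canonical form x86_64-pc-linux-gnu.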
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS
+
+Canonicalize a configuration name.
+
+Options:
+ -h, --help print this help, then exit
+ -t, --time-stamp print date of last modification, then exit
+ -v, --version print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright 1992-2018 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case $1 in
+ --time-stamp | --time* | -t )
+ echo "$timestamp" ; exit ;;
+ --version | -v )
+ echo "$version" ; exit ;;
+ --help | --h* | -h )
+ echo "$usage"; exit ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ echo "$me: invalid option $1$help"
+ exit 1 ;;
+
+ *local*)
+ # First pass through any local machine types.
+ echo "$1"
+ exit ;;
+
+ * )
+ break ;;
+ esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+ exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+ exit 1;;
+esac
+
+# Split fields of configuration type
+IFS="-" read -r field1 field2 field3 field4 <<EOF
+$1
+EOF
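+# For example (illustrative): "i686-pc-linux-gnu" splits into field1=i686,
+# field2=pc, field3=linux and field4=gnu.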
+
+# Separate into logical components for further validation
+case $1 in
+ *-*-*-*)
+ basic_machine=$field1-$field2
+ os=-$field3-$field4
+ ;;
+ *-*-*)
+		# Ambiguous whether COMPANY is present, or has been skipped
+		# and KERNEL-OS occupies the last two parts.
+ maybe_os=$field2-$field3
+ case $maybe_os in
+ nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc \
+ | linux-newlib* | linux-musl* | linux-uclibc* | uclinux-uclibc* \
+ | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \
+ | netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \
+ | storm-chaos* | os2-emx* | rtmk-nova*)
+ basic_machine=$field1
+ os=-$maybe_os
+ ;;
+ android-linux)
+ basic_machine=$field1-unknown
+ os=-linux-android
+ ;;
+ *)
+ basic_machine=$field1-$field2
+ os=-$field3
+ ;;
+ esac
+ ;;
+ *-*)
+ basic_machine=$field1
+ os=-$field2
+ ;;
+ *)
+ basic_machine=$1
+ os=
+ ;;
+esac
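+# For example (illustrative): "arm-linux-gnueabi" has maybe_os=linux-gnueabi,
+# which matches linux-gnu* above, so basic_machine=arm and os=-linux-gnueabi.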
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work. We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+ -sun*os*)
+ # Prevent following clause from handling this invalid input.
+ ;;
+ -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+ -apple | -axis | -knuth | -cray | -microblaze*)
+ os=
+ basic_machine=$1
+ ;;
+ -bluegene*)
+ os=-cnk
+ ;;
+ -sim | -cisco | -oki | -wec | -winbond)
+ os=
+ basic_machine=$1
+ ;;
+ -scout)
+ ;;
+ -wrs)
+ os=-vxworks
+ basic_machine=$1
+ ;;
+ -chorusos*)
+ os=-chorusos
+ basic_machine=$1
+ ;;
+ -chorusrdb)
+ os=-chorusrdb
+ basic_machine=$1
+ ;;
+ -hiux*)
+ os=-hiuxwe2
+ ;;
+ -sco6)
+ os=-sco5v6
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco5)
+ os=-sco3.2v5
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco4)
+ os=-sco3.2v4
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco3.2.[4-9]*)
+ os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco3.2v[4-9]*)
+ # Don't forget version if it is 3.2v4 or newer.
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco5v6*)
+ # Don't forget version if it is 3.2v4 or newer.
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco*)
+ os=-sco3.2v2
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -udk*)
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -isc)
+ os=-isc2.2
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -clix*)
+ basic_machine=clipper-intergraph
+ ;;
+ -isc*)
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -lynx*178)
+ os=-lynxos178
+ ;;
+ -lynx*5)
+ os=-lynxos5
+ ;;
+ -lynx*)
+ os=-lynxos
+ ;;
+ -ptx*)
+ basic_machine=`echo "$1" | sed -e 's/86-.*/86-sequent/'`
+ ;;
+ -psos*)
+ os=-psos
+ ;;
+ -mint | -mint[0-9]*)
+ basic_machine=m68k-atari
+ os=-mint
+ ;;
+esac
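+# For example (illustrative): "decstation-3100" initially parses as
+# basic_machine=decstation and os=-3100; the -3100* entry above clears the
+# bogus OS so the alias table below can map the machine to mips-dec.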
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+ # Recognize the basic CPU types without company name.
+ # Some are omitted here because they have special meanings below.
+ 1750a | 580 \
+ | a29k \
+ | aarch64 | aarch64_be \
+ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+ | am33_2.0 \
+ | arc | arceb \
+ | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv6m | armv[78][arm] \
+ | avr | avr32 \
+ | ba \
+ | be32 | be64 \
+ | bfin \
+ | c4x | c8051 | clipper | csky \
+ | d10v | d30v | dlx | dsp16xx \
+ | e2k | epiphany \
+ | fido | fr30 | frv | ft32 \
+ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+ | hexagon \
+ | i370 | i860 | i960 | ia16 | ia64 \
+ | ip2k | iq2000 \
+ | k1om \
+ | le32 | le64 \
+ | lm32 \
+ | m32c | m32r | m32rle | m68000 | m68k | m88k \
+ | maxq | mb | microblaze | microblazeel | mcore | mep | metag \
+ | mips | mipsbe | mipseb | mipsel | mipsle \
+ | mips16 \
+ | mips64 | mips64el \
+ | mips64octeon | mips64octeonel \
+ | mips64orion | mips64orionel \
+ | mips64r5900 | mips64r5900el \
+ | mips64vr | mips64vrel \
+ | mips64vr4100 | mips64vr4100el \
+ | mips64vr4300 | mips64vr4300el \
+ | mips64vr5000 | mips64vr5000el \
+ | mips64vr5900 | mips64vr5900el \
+ | mipsisa32 | mipsisa32el \
+ | mipsisa32r2 | mipsisa32r2el \
+ | mipsisa32r6 | mipsisa32r6el \
+ | mipsisa64 | mipsisa64el \
+ | mipsisa64r2 | mipsisa64r2el \
+ | mipsisa64r6 | mipsisa64r6el \
+ | mipsisa64sb1 | mipsisa64sb1el \
+ | mipsisa64sr71k | mipsisa64sr71kel \
+ | mipsr5900 | mipsr5900el \
+ | mipstx39 | mipstx39el \
+ | mn10200 | mn10300 \
+ | moxie \
+ | mt \
+ | msp430 \
+ | nds32 | nds32le | nds32be \
+ | nfp \
+ | nios | nios2 | nios2eb | nios2el \
+ | ns16k | ns32k \
+ | open8 | or1k | or1knd | or32 \
+ | pdp10 | pj | pjl \
+ | powerpc | powerpc64 | powerpc64le | powerpcle \
+ | pru \
+ | pyramid \
+ | riscv32 | riscv64 \
+ | rl78 | rx \
+ | score \
+ | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
+ | sh64 | sh64le \
+ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
+ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \
+ | spu \
+ | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \
+ | ubicom32 \
+ | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \
+ | visium \
+ | wasm32 \
+ | x86 | xc16x | xstormy16 | xtensa \
+ | z8k | z80)
+ basic_machine=$basic_machine-unknown
+ ;;
+ c54x)
+ basic_machine=tic54x-unknown
+ ;;
+ c55x)
+ basic_machine=tic55x-unknown
+ ;;
+ c6x)
+ basic_machine=tic6x-unknown
+ ;;
+ leon|leon[3-9])
+ basic_machine=sparc-$basic_machine
+ ;;
+ m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip)
+ basic_machine=$basic_machine-unknown
+ os=-none
+ ;;
+ m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65)
+ ;;
+ m9s12z | m68hcs12z | hcs12z | s12z)
+ basic_machine=s12z-unknown
+ os=-none
+ ;;
+ ms1)
+ basic_machine=mt-unknown
+ ;;
+
+ strongarm | thumb | xscale)
+ basic_machine=arm-unknown
+ ;;
+ xgate)
+ basic_machine=$basic_machine-unknown
+ os=-none
+ ;;
+ xscaleeb)
+ basic_machine=armeb-unknown
+ ;;
+
+ xscaleel)
+ basic_machine=armel-unknown
+ ;;
+
+ # We use `pc' rather than `unknown'
+ # because (1) that's what they normally are, and
+ # (2) the word "unknown" tends to confuse beginning users.
+ i*86 | x86_64)
+ basic_machine=$basic_machine-pc
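+		# For example (illustrative): a bare "i686" becomes i686-pc
+		# here and is given a default OS later in the script.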
+ ;;
+ # Object if more than one company name word.
+ *-*-*)
+ echo Invalid configuration \`"$1"\': machine \`"$basic_machine"\' not recognized 1>&2
+ exit 1
+ ;;
+ # Recognize the basic CPU types with company name.
+ 580-* \
+ | a29k-* \
+ | aarch64-* | aarch64_be-* \
+ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \
+ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
+ | avr-* | avr32-* \
+ | ba-* \
+ | be32-* | be64-* \
+ | bfin-* | bs2000-* \
+ | c[123]* | c30-* | [cjt]90-* | c4x-* \
+ | c8051-* | clipper-* | craynv-* | csky-* | cydra-* \
+ | d10v-* | d30v-* | dlx-* \
+ | e2k-* | elxsi-* \
+ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
+ | h8300-* | h8500-* \
+ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+ | hexagon-* \
+ | i*86-* | i860-* | i960-* | ia16-* | ia64-* \
+ | ip2k-* | iq2000-* \
+ | k1om-* \
+ | le32-* | le64-* \
+ | lm32-* \
+ | m32c-* | m32r-* | m32rle-* \
+ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+ | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \
+ | microblaze-* | microblazeel-* \
+ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+ | mips16-* \
+ | mips64-* | mips64el-* \
+ | mips64octeon-* | mips64octeonel-* \
+ | mips64orion-* | mips64orionel-* \
+ | mips64r5900-* | mips64r5900el-* \
+ | mips64vr-* | mips64vrel-* \
+ | mips64vr4100-* | mips64vr4100el-* \
+ | mips64vr4300-* | mips64vr4300el-* \
+ | mips64vr5000-* | mips64vr5000el-* \
+ | mips64vr5900-* | mips64vr5900el-* \
+ | mipsisa32-* | mipsisa32el-* \
+ | mipsisa32r2-* | mipsisa32r2el-* \
+ | mipsisa32r6-* | mipsisa32r6el-* \
+ | mipsisa64-* | mipsisa64el-* \
+ | mipsisa64r2-* | mipsisa64r2el-* \
+ | mipsisa64r6-* | mipsisa64r6el-* \
+ | mipsisa64sb1-* | mipsisa64sb1el-* \
+ | mipsisa64sr71k-* | mipsisa64sr71kel-* \
+ | mipsr5900-* | mipsr5900el-* \
+ | mipstx39-* | mipstx39el-* \
+ | mmix-* \
+ | mt-* \
+ | msp430-* \
+ | nds32-* | nds32le-* | nds32be-* \
+ | nfp-* \
+ | nios-* | nios2-* | nios2eb-* | nios2el-* \
+ | none-* | np1-* | ns16k-* | ns32k-* \
+ | open8-* \
+ | or1k*-* \
+ | orion-* \
+ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \
+ | pru-* \
+ | pyramid-* \
+ | riscv32-* | riscv64-* \
+ | rl78-* | romp-* | rs6000-* | rx-* \
+ | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
+ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
+ | sparclite-* \
+ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \
+ | tahoe-* \
+ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+ | tile*-* \
+ | tron-* \
+ | ubicom32-* \
+ | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \
+ | vax-* \
+ | visium-* \
+ | wasm32-* \
+ | we32k-* \
+ | x86-* | x86_64-* | xc16x-* | xps100-* \
+ | xstormy16-* | xtensa*-* \
+ | ymp-* \
+ | z8k-* | z80-*)
+ ;;
+ # Recognize the basic CPU types without company name, with glob match.
+ xtensa*)
+ basic_machine=$basic_machine-unknown
+ ;;
+ # Recognize the various machine names and aliases which stand
+ # for a CPU type and a company and sometimes even an OS.
+ 386bsd)
+ basic_machine=i386-pc
+ os=-bsd
+ ;;
+ 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+ basic_machine=m68000-att
+ ;;
+ 3b*)
+ basic_machine=we32k-att
+ ;;
+ a29khif)
+ basic_machine=a29k-amd
+ os=-udi
+ ;;
+ abacus)
+ basic_machine=abacus-unknown
+ ;;
+ adobe68k)
+ basic_machine=m68010-adobe
+ os=-scout
+ ;;
+ alliant | fx80)
+ basic_machine=fx80-alliant
+ ;;
+ altos | altos3068)
+ basic_machine=m68k-altos
+ ;;
+ am29k)
+ basic_machine=a29k-none
+ os=-bsd
+ ;;
+ amd64)
+ basic_machine=x86_64-pc
+ ;;
+ amd64-*)
+ basic_machine=x86_64-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ amdahl)
+ basic_machine=580-amdahl
+ os=-sysv
+ ;;
+ amiga | amiga-*)
+ basic_machine=m68k-unknown
+ ;;
+ amigaos | amigados)
+ basic_machine=m68k-unknown
+ os=-amigaos
+ ;;
+ amigaunix | amix)
+ basic_machine=m68k-unknown
+ os=-sysv4
+ ;;
+ apollo68)
+ basic_machine=m68k-apollo
+ os=-sysv
+ ;;
+ apollo68bsd)
+ basic_machine=m68k-apollo
+ os=-bsd
+ ;;
+ aros)
+ basic_machine=i386-pc
+ os=-aros
+ ;;
+ asmjs)
+ basic_machine=asmjs-unknown
+ ;;
+ aux)
+ basic_machine=m68k-apple
+ os=-aux
+ ;;
+ balance)
+ basic_machine=ns32k-sequent
+ os=-dynix
+ ;;
+ blackfin)
+ basic_machine=bfin-unknown
+ os=-linux
+ ;;
+ blackfin-*)
+ basic_machine=bfin-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ os=-linux
+ ;;
+ bluegene*)
+ basic_machine=powerpc-ibm
+ os=-cnk
+ ;;
+ c54x-*)
+ basic_machine=tic54x-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ c55x-*)
+ basic_machine=tic55x-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ c6x-*)
+ basic_machine=tic6x-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ c90)
+ basic_machine=c90-cray
+ os=-unicos
+ ;;
+ cegcc)
+ basic_machine=arm-unknown
+ os=-cegcc
+ ;;
+ convex-c1)
+ basic_machine=c1-convex
+ os=-bsd
+ ;;
+ convex-c2)
+ basic_machine=c2-convex
+ os=-bsd
+ ;;
+ convex-c32)
+ basic_machine=c32-convex
+ os=-bsd
+ ;;
+ convex-c34)
+ basic_machine=c34-convex
+ os=-bsd
+ ;;
+ convex-c38)
+ basic_machine=c38-convex
+ os=-bsd
+ ;;
+ cray | j90)
+ basic_machine=j90-cray
+ os=-unicos
+ ;;
+ craynv)
+ basic_machine=craynv-cray
+ os=-unicosmp
+ ;;
+ cr16 | cr16-*)
+ basic_machine=cr16-unknown
+ os=-elf
+ ;;
+ crds | unos)
+ basic_machine=m68k-crds
+ ;;
+ crisv32 | crisv32-* | etraxfs*)
+ basic_machine=crisv32-axis
+ ;;
+ cris | cris-* | etrax*)
+ basic_machine=cris-axis
+ ;;
+ crx)
+ basic_machine=crx-unknown
+ os=-elf
+ ;;
+ da30 | da30-*)
+ basic_machine=m68k-da30
+ ;;
+ decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+ basic_machine=mips-dec
+ ;;
+ decsystem10* | dec10*)
+ basic_machine=pdp10-dec
+ os=-tops10
+ ;;
+ decsystem20* | dec20*)
+ basic_machine=pdp10-dec
+ os=-tops20
+ ;;
+ delta | 3300 | motorola-3300 | motorola-delta \
+ | 3300-motorola | delta-motorola)
+ basic_machine=m68k-motorola
+ ;;
+ delta88)
+ basic_machine=m88k-motorola
+ os=-sysv3
+ ;;
+ dicos)
+ basic_machine=i686-pc
+ os=-dicos
+ ;;
+ djgpp)
+ basic_machine=i586-pc
+ os=-msdosdjgpp
+ ;;
+ dpx20 | dpx20-*)
+ basic_machine=rs6000-bull
+ os=-bosx
+ ;;
+ dpx2*)
+ basic_machine=m68k-bull
+ os=-sysv3
+ ;;
+ e500v[12])
+ basic_machine=powerpc-unknown
+ os=$os"spe"
+ ;;
+ e500v[12]-*)
+ basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ os=$os"spe"
+ ;;
+ ebmon29k)
+ basic_machine=a29k-amd
+ os=-ebmon
+ ;;
+ elxsi)
+ basic_machine=elxsi-elxsi
+ os=-bsd
+ ;;
+ encore | umax | mmax)
+ basic_machine=ns32k-encore
+ ;;
+ es1800 | OSE68k | ose68k | ose | OSE)
+ basic_machine=m68k-ericsson
+ os=-ose
+ ;;
+ fx2800)
+ basic_machine=i860-alliant
+ ;;
+ genix)
+ basic_machine=ns32k-ns
+ ;;
+ gmicro)
+ basic_machine=tron-gmicro
+ os=-sysv
+ ;;
+ go32)
+ basic_machine=i386-pc
+ os=-go32
+ ;;
+ h3050r* | hiux*)
+ basic_machine=hppa1.1-hitachi
+ os=-hiuxwe2
+ ;;
+ h8300hms)
+ basic_machine=h8300-hitachi
+ os=-hms
+ ;;
+ h8300xray)
+ basic_machine=h8300-hitachi
+ os=-xray
+ ;;
+ h8500hms)
+ basic_machine=h8500-hitachi
+ os=-hms
+ ;;
+ harris)
+ basic_machine=m88k-harris
+ os=-sysv3
+ ;;
+ hp300-*)
+ basic_machine=m68k-hp
+ ;;
+ hp300bsd)
+ basic_machine=m68k-hp
+ os=-bsd
+ ;;
+ hp300hpux)
+ basic_machine=m68k-hp
+ os=-hpux
+ ;;
+ hp3k9[0-9][0-9] | hp9[0-9][0-9])
+ basic_machine=hppa1.0-hp
+ ;;
+ hp9k2[0-9][0-9] | hp9k31[0-9])
+ basic_machine=m68000-hp
+ ;;
+ hp9k3[2-9][0-9])
+ basic_machine=m68k-hp
+ ;;
+ hp9k6[0-9][0-9] | hp6[0-9][0-9])
+ basic_machine=hppa1.0-hp
+ ;;
+ hp9k7[0-79][0-9] | hp7[0-79][0-9])
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k78[0-9] | hp78[0-9])
+ # FIXME: really hppa2.0-hp
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+ # FIXME: really hppa2.0-hp
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k8[0-9][13679] | hp8[0-9][13679])
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k8[0-9][0-9] | hp8[0-9][0-9])
+ basic_machine=hppa1.0-hp
+ ;;
+ hppaosf)
+ basic_machine=hppa1.1-hp
+ os=-osf
+ ;;
+ hppro)
+ basic_machine=hppa1.1-hp
+ os=-proelf
+ ;;
+ i370-ibm* | ibm*)
+ basic_machine=i370-ibm
+ ;;
+ i*86v32)
+ basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'`
+ os=-sysv32
+ ;;
+ i*86v4*)
+ basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'`
+ os=-sysv4
+ ;;
+ i*86v)
+ basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'`
+ os=-sysv
+ ;;
+ i*86sol2)
+ basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'`
+ os=-solaris2
+ ;;
+ i386mach)
+ basic_machine=i386-mach
+ os=-mach
+ ;;
+ vsta)
+ basic_machine=i386-unknown
+ os=-vsta
+ ;;
+ iris | iris4d)
+ basic_machine=mips-sgi
+ case $os in
+ -irix*)
+ ;;
+ *)
+ os=-irix4
+ ;;
+ esac
+ ;;
+ isi68 | isi)
+ basic_machine=m68k-isi
+ os=-sysv
+ ;;
+ leon-*|leon[3-9]-*)
+ basic_machine=sparc-`echo "$basic_machine" | sed 's/-.*//'`
+ ;;
+ m68knommu)
+ basic_machine=m68k-unknown
+ os=-linux
+ ;;
+ m68knommu-*)
+ basic_machine=m68k-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ os=-linux
+ ;;
+ magnum | m3230)
+ basic_machine=mips-mips
+ os=-sysv
+ ;;
+ merlin)
+ basic_machine=ns32k-utek
+ os=-sysv
+ ;;
+ microblaze*)
+ basic_machine=microblaze-xilinx
+ ;;
+ mingw64)
+ basic_machine=x86_64-pc
+ os=-mingw64
+ ;;
+ mingw32)
+ basic_machine=i686-pc
+ os=-mingw32
+ ;;
+ mingw32ce)
+ basic_machine=arm-unknown
+ os=-mingw32ce
+ ;;
+ miniframe)
+ basic_machine=m68000-convergent
+ ;;
+ *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+ basic_machine=m68k-atari
+ os=-mint
+ ;;
+ mips3*-*)
+ basic_machine=`echo "$basic_machine" | sed -e 's/mips3/mips64/'`
+ ;;
+ mips3*)
+ basic_machine=`echo "$basic_machine" | sed -e 's/mips3/mips64/'`-unknown
+ ;;
+ monitor)
+ basic_machine=m68k-rom68k
+ os=-coff
+ ;;
+ morphos)
+ basic_machine=powerpc-unknown
+ os=-morphos
+ ;;
+ moxiebox)
+ basic_machine=moxie-unknown
+ os=-moxiebox
+ ;;
+ msdos)
+ basic_machine=i386-pc
+ os=-msdos
+ ;;
+ ms1-*)
+ basic_machine=`echo "$basic_machine" | sed -e 's/ms1-/mt-/'`
+ ;;
+ msys)
+ basic_machine=i686-pc
+ os=-msys
+ ;;
+ mvs)
+ basic_machine=i370-ibm
+ os=-mvs
+ ;;
+ nacl)
+ basic_machine=le32-unknown
+ os=-nacl
+ ;;
+ ncr3000)
+ basic_machine=i486-ncr
+ os=-sysv4
+ ;;
+ netbsd386)
+ basic_machine=i386-unknown
+ os=-netbsd
+ ;;
+ netwinder)
+ basic_machine=armv4l-rebel
+ os=-linux
+ ;;
+ news | news700 | news800 | news900)
+ basic_machine=m68k-sony
+ os=-newsos
+ ;;
+ news1000)
+ basic_machine=m68030-sony
+ os=-newsos
+ ;;
+ news-3600 | risc-news)
+ basic_machine=mips-sony
+ os=-newsos
+ ;;
+ necv70)
+ basic_machine=v70-nec
+ os=-sysv
+ ;;
+ next | m*-next)
+ basic_machine=m68k-next
+ case $os in
+ -nextstep* )
+ ;;
+ -ns2*)
+ os=-nextstep2
+ ;;
+ *)
+ os=-nextstep3
+ ;;
+ esac
+ ;;
+ nh3000)
+ basic_machine=m68k-harris
+ os=-cxux
+ ;;
+ nh[45]000)
+ basic_machine=m88k-harris
+ os=-cxux
+ ;;
+ nindy960)
+ basic_machine=i960-intel
+ os=-nindy
+ ;;
+ mon960)
+ basic_machine=i960-intel
+ os=-mon960
+ ;;
+ nonstopux)
+ basic_machine=mips-compaq
+ os=-nonstopux
+ ;;
+ np1)
+ basic_machine=np1-gould
+ ;;
+ neo-tandem)
+ basic_machine=neo-tandem
+ ;;
+ nse-tandem)
+ basic_machine=nse-tandem
+ ;;
+ nsr-tandem)
+ basic_machine=nsr-tandem
+ ;;
+ nsv-tandem)
+ basic_machine=nsv-tandem
+ ;;
+ nsx-tandem)
+ basic_machine=nsx-tandem
+ ;;
+ op50n-* | op60c-*)
+ basic_machine=hppa1.1-oki
+ os=-proelf
+ ;;
+ openrisc | openrisc-*)
+ basic_machine=or32-unknown
+ ;;
+ os400)
+ basic_machine=powerpc-ibm
+ os=-os400
+ ;;
+ OSE68000 | ose68000)
+ basic_machine=m68000-ericsson
+ os=-ose
+ ;;
+ os68k)
+ basic_machine=m68k-none
+ os=-os68k
+ ;;
+ pa-hitachi)
+ basic_machine=hppa1.1-hitachi
+ os=-hiuxwe2
+ ;;
+ paragon)
+ basic_machine=i860-intel
+ os=-osf
+ ;;
+ parisc)
+ basic_machine=hppa-unknown
+ os=-linux
+ ;;
+ parisc-*)
+ basic_machine=hppa-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ os=-linux
+ ;;
+ pbd)
+ basic_machine=sparc-tti
+ ;;
+ pbb)
+ basic_machine=m68k-tti
+ ;;
+ pc532 | pc532-*)
+ basic_machine=ns32k-pc532
+ ;;
+ pc98)
+ basic_machine=i386-pc
+ ;;
+ pc98-*)
+ basic_machine=i386-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ pentium | p5 | k5 | k6 | nexgen | viac3)
+ basic_machine=i586-pc
+ ;;
+ pentiumpro | p6 | 6x86 | athlon | athlon_*)
+ basic_machine=i686-pc
+ ;;
+ pentiumii | pentium2 | pentiumiii | pentium3)
+ basic_machine=i686-pc
+ ;;
+ pentium4)
+ basic_machine=i786-pc
+ ;;
+ pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+ basic_machine=i586-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ pentiumpro-* | p6-* | 6x86-* | athlon-*)
+ basic_machine=i686-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+ basic_machine=i686-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ pentium4-*)
+ basic_machine=i786-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ pn)
+ basic_machine=pn-gould
+ ;;
+ power) basic_machine=power-ibm
+ ;;
+ ppc | ppcbe) basic_machine=powerpc-unknown
+ ;;
+ ppc-* | ppcbe-*)
+ basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ ppcle | powerpclittle)
+ basic_machine=powerpcle-unknown
+ ;;
+ ppcle-* | powerpclittle-*)
+ basic_machine=powerpcle-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ ppc64) basic_machine=powerpc64-unknown
+ ;;
+ ppc64-*) basic_machine=powerpc64-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ ppc64le | powerpc64little)
+ basic_machine=powerpc64le-unknown
+ ;;
+ ppc64le-* | powerpc64little-*)
+ basic_machine=powerpc64le-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ ps2)
+ basic_machine=i386-ibm
+ ;;
+ pw32)
+ basic_machine=i586-unknown
+ os=-pw32
+ ;;
+ rdos | rdos64)
+ basic_machine=x86_64-pc
+ os=-rdos
+ ;;
+ rdos32)
+ basic_machine=i386-pc
+ os=-rdos
+ ;;
+ rom68k)
+ basic_machine=m68k-rom68k
+ os=-coff
+ ;;
+ rm[46]00)
+ basic_machine=mips-siemens
+ ;;
+ rtpc | rtpc-*)
+ basic_machine=romp-ibm
+ ;;
+ s390 | s390-*)
+ basic_machine=s390-ibm
+ ;;
+ s390x | s390x-*)
+ basic_machine=s390x-ibm
+ ;;
+ sa29200)
+ basic_machine=a29k-amd
+ os=-udi
+ ;;
+ sb1)
+ basic_machine=mipsisa64sb1-unknown
+ ;;
+ sb1el)
+ basic_machine=mipsisa64sb1el-unknown
+ ;;
+ sde)
+ basic_machine=mipsisa32-sde
+ os=-elf
+ ;;
+ sei)
+ basic_machine=mips-sei
+ os=-seiux
+ ;;
+ sequent)
+ basic_machine=i386-sequent
+ ;;
+ sh5el)
+ basic_machine=sh5le-unknown
+ ;;
+ simso-wrs)
+ basic_machine=sparclite-wrs
+ os=-vxworks
+ ;;
+ sps7)
+ basic_machine=m68k-bull
+ os=-sysv2
+ ;;
+ spur)
+ basic_machine=spur-unknown
+ ;;
+ st2000)
+ basic_machine=m68k-tandem
+ ;;
+ stratus)
+ basic_machine=i860-stratus
+ os=-sysv4
+ ;;
+ strongarm-* | thumb-*)
+ basic_machine=arm-`echo "$basic_machine" | sed 's/^[^-]*-//'`
+ ;;
+ sun2)
+ basic_machine=m68000-sun
+ ;;
+ sun2os3)
+ basic_machine=m68000-sun
+ os=-sunos3
+ ;;
+ sun2os4)
+ basic_machine=m68000-sun
+ os=-sunos4
+ ;;
+ sun3os3)
+ basic_machine=m68k-sun
+ os=-sunos3
+ ;;
+ sun3os4)
+ basic_machine=m68k-sun
+ os=-sunos4
+ ;;
+ sun4os3)
+ basic_machine=sparc-sun
+ os=-sunos3
+ ;;
+ sun4os4)
+ basic_machine=sparc-sun
+ os=-sunos4
+ ;;
+ sun4sol2)
+ basic_machine=sparc-sun
+ os=-solaris2
+ ;;
+ sun3 | sun3-*)
+ basic_machine=m68k-sun
+ ;;
+ sun4)
+ basic_machine=sparc-sun
+ ;;
+ sun386 | sun386i | roadrunner)
+ basic_machine=i386-sun
+ ;;
+ sv1)
+ basic_machine=sv1-cray
+ os=-unicos
+ ;;
+ symmetry)
+ basic_machine=i386-sequent
+ os=-dynix
+ ;;
+ t3e)
+ basic_machine=alphaev5-cray
+ os=-unicos
+ ;;
+ t90)
+ basic_machine=t90-cray
+ os=-unicos
+ ;;
+ tile*)
+ basic_machine=$basic_machine-unknown
+ os=-linux-gnu
+ ;;
+ tx39)
+ basic_machine=mipstx39-unknown
+ ;;
+ tx39el)
+ basic_machine=mipstx39el-unknown
+ ;;
+ toad1)
+ basic_machine=pdp10-xkl
+ os=-tops20
+ ;;
+ tower | tower-32)
+ basic_machine=m68k-ncr
+ ;;
+ tpf)
+ basic_machine=s390x-ibm
+ os=-tpf
+ ;;
+ udi29k)
+ basic_machine=a29k-amd
+ os=-udi
+ ;;
+ ultra3)
+ basic_machine=a29k-nyu
+ os=-sym1
+ ;;
+ v810 | necv810)
+ basic_machine=v810-nec
+ os=-none
+ ;;
+ vaxv)
+ basic_machine=vax-dec
+ os=-sysv
+ ;;
+ vms)
+ basic_machine=vax-dec
+ os=-vms
+ ;;
+ vpp*|vx|vx-*)
+ basic_machine=f301-fujitsu
+ ;;
+ vxworks960)
+ basic_machine=i960-wrs
+ os=-vxworks
+ ;;
+ vxworks68)
+ basic_machine=m68k-wrs
+ os=-vxworks
+ ;;
+ vxworks29k)
+ basic_machine=a29k-wrs
+ os=-vxworks
+ ;;
+ w65*)
+ basic_machine=w65-wdc
+ os=-none
+ ;;
+ w89k-*)
+ basic_machine=hppa1.1-winbond
+ os=-proelf
+ ;;
+ x64)
+ basic_machine=x86_64-pc
+ ;;
+ xbox)
+ basic_machine=i686-pc
+ os=-mingw32
+ ;;
+ xps | xps100)
+ basic_machine=xps100-honeywell
+ ;;
+ xscale-* | xscalee[bl]-*)
+ basic_machine=`echo "$basic_machine" | sed 's/^xscale/arm/'`
+ ;;
+ ymp)
+ basic_machine=ymp-cray
+ os=-unicos
+ ;;
+ none)
+ basic_machine=none-none
+ os=-none
+ ;;
+
+# Here we handle the default manufacturer of certain CPU types. It is in
+# some cases the only manufacturer, in others, it is the most popular.
+ w89k)
+ basic_machine=hppa1.1-winbond
+ ;;
+ op50n)
+ basic_machine=hppa1.1-oki
+ ;;
+ op60c)
+ basic_machine=hppa1.1-oki
+ ;;
+ romp)
+ basic_machine=romp-ibm
+ ;;
+ mmix)
+ basic_machine=mmix-knuth
+ ;;
+ rs6000)
+ basic_machine=rs6000-ibm
+ ;;
+ vax)
+ basic_machine=vax-dec
+ ;;
+ pdp11)
+ basic_machine=pdp11-dec
+ ;;
+ we32k)
+ basic_machine=we32k-att
+ ;;
+ sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
+ basic_machine=sh-unknown
+ ;;
+ cydra)
+ basic_machine=cydra-cydrome
+ ;;
+ orion)
+ basic_machine=orion-highlevel
+ ;;
+ orion105)
+ basic_machine=clipper-highlevel
+ ;;
+ mac | mpw | mac-mpw)
+ basic_machine=m68k-apple
+ ;;
+ pmac | pmac-mpw)
+ basic_machine=powerpc-apple
+ ;;
+ *-unknown)
+ # Make sure to match an already-canonicalized machine name.
+ ;;
+ *)
+ echo Invalid configuration \`"$1"\': machine \`"$basic_machine"\' not recognized 1>&2
+ exit 1
+ ;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+ *-digital*)
+ basic_machine=`echo "$basic_machine" | sed 's/digital.*/dec/'`
+ ;;
+ *-commodore*)
+ basic_machine=`echo "$basic_machine" | sed 's/commodore.*/cbm/'`
+ ;;
+ *)
+ ;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x$os != x ]
+then
+case $os in
+ # First match some system type aliases that might get confused
+ # with valid system types.
+ # -solaris* is a basic system type, with this one exception.
+ -auroraux)
+ os=-auroraux
+ ;;
+ -solaris1 | -solaris1.*)
+ os=`echo $os | sed -e 's|solaris1|sunos4|'`
+ ;;
+ -solaris)
+ os=-solaris2
+ ;;
+ -unixware*)
+ os=-sysv4.2uw
+ ;;
+ -gnu/linux*)
+ os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+ ;;
+ # es1800 is here to avoid being matched by es* (a different OS)
+ -es1800*)
+ os=-ose
+ ;;
+ # Now accept the basic system types.
+ # The portable systems comes first.
+ # Each alternative MUST end in a * to match a version number.
+ # -sysv* is not here because it comes later, after sysvr4.
+ -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
+ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
+ | -sym* | -kopensolaris* | -plan9* \
+ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+ | -aos* | -aros* | -cloudabi* | -sortix* \
+ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+ | -hiux* | -knetbsd* | -mirbsd* | -netbsd* \
+ | -bitrig* | -openbsd* | -solidbsd* | -libertybsd* \
+ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* | -hcos* \
+ | -chorusos* | -chorusrdb* | -cegcc* | -glidix* \
+ | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+ | -midipix* | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \
+ | -linux-newlib* | -linux-musl* | -linux-uclibc* \
+ | -uxpv* | -beos* | -mpeix* | -udk* | -moxiebox* \
+ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* \
+ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+ | -morphos* | -superux* | -rtmk* | -windiss* \
+ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
+ | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es* \
+ | -onefs* | -tirtos* | -phoenix* | -fuchsia* | -redox* | -bme* \
+ | -midnightbsd*)
+ # Remember, each alternative MUST END IN *, to match a version number.
+ ;;
+ -qnx*)
+ case $basic_machine in
+ x86-* | i*86-*)
+ ;;
+ *)
+ os=-nto$os
+ ;;
+ esac
+ ;;
+ -nto-qnx*)
+ ;;
+ -nto*)
+ os=`echo $os | sed -e 's|nto|nto-qnx|'`
+ ;;
+ -sim | -xray | -os68k* | -v88r* \
+ | -windows* | -osx | -abug | -netware* | -os9* \
+ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+ ;;
+ -mac*)
+ os=`echo "$os" | sed -e 's|mac|macos|'`
+ ;;
+ -linux-dietlibc)
+ os=-linux-dietlibc
+ ;;
+ -linux*)
+ os=`echo $os | sed -e 's|linux|linux-gnu|'`
+ ;;
+ -sunos5*)
+ os=`echo "$os" | sed -e 's|sunos5|solaris2|'`
+ ;;
+ -sunos6*)
+ os=`echo "$os" | sed -e 's|sunos6|solaris3|'`
+ ;;
+ -opened*)
+ os=-openedition
+ ;;
+ -os400*)
+ os=-os400
+ ;;
+ -wince*)
+ os=-wince
+ ;;
+ -utek*)
+ os=-bsd
+ ;;
+ -dynix*)
+ os=-bsd
+ ;;
+ -acis*)
+ os=-aos
+ ;;
+ -atheos*)
+ os=-atheos
+ ;;
+ -syllable*)
+ os=-syllable
+ ;;
+ -386bsd)
+ os=-bsd
+ ;;
+ -ctix* | -uts*)
+ os=-sysv
+ ;;
+ -nova*)
+ os=-rtmk-nova
+ ;;
+ -ns2)
+ os=-nextstep2
+ ;;
+ -nsk*)
+ os=-nsk
+ ;;
+ # Preserve the version number of sinix5.
+ -sinix5.*)
+ os=`echo $os | sed -e 's|sinix|sysv|'`
+ ;;
+ -sinix*)
+ os=-sysv4
+ ;;
+ -tpf*)
+ os=-tpf
+ ;;
+ -triton*)
+ os=-sysv3
+ ;;
+ -oss*)
+ os=-sysv3
+ ;;
+ -svr4*)
+ os=-sysv4
+ ;;
+ -svr3)
+ os=-sysv3
+ ;;
+ -sysvr4)
+ os=-sysv4
+ ;;
+ # This must come after -sysvr4.
+ -sysv*)
+ ;;
+ -ose*)
+ os=-ose
+ ;;
+ -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+ os=-mint
+ ;;
+ -zvmoe)
+ os=-zvmoe
+ ;;
+ -dicos*)
+ os=-dicos
+ ;;
+ -pikeos*)
+ # Until real need of OS specific support for
+ # particular features comes up, bare metal
+ # configurations are quite functional.
+ case $basic_machine in
+ arm*)
+ os=-eabi
+ ;;
+ *)
+ os=-elf
+ ;;
+ esac
+ ;;
+ -nacl*)
+ ;;
+ -ios)
+ ;;
+ -none)
+ ;;
+ -*-eabi)
+ case $basic_machine in
+ arm*)
+ ;;
+ esac
+ ;;
+ *)
+ # Get rid of the `-' at the beginning of $os.
+ os=`echo $os | sed 's/[^-]*-//'`
+ echo Invalid configuration \`"$1"\': system \`"$os"\' not recognized 1>&2
+ exit 1
+ ;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine or put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system. Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+case $basic_machine in
+ score-*)
+ os=-elf
+ ;;
+ spu-*)
+ os=-elf
+ ;;
+ *-acorn)
+ os=-riscix1.2
+ ;;
+ arm*-rebel)
+ os=-linux
+ ;;
+ arm*-semi)
+ os=-aout
+ ;;
+ c4x-* | tic4x-*)
+ os=-coff
+ ;;
+ c8051-*)
+ os=-elf
+ ;;
+ hexagon-*)
+ os=-elf
+ ;;
+ tic54x-*)
+ os=-coff
+ ;;
+ tic55x-*)
+ os=-coff
+ ;;
+ tic6x-*)
+ os=-coff
+ ;;
+ # This must come before the *-dec entry.
+ pdp10-*)
+ os=-tops20
+ ;;
+ pdp11-*)
+ os=-none
+ ;;
+ *-dec | vax-*)
+ os=-ultrix4.2
+ ;;
+ m68*-apollo)
+ os=-domain
+ ;;
+ i386-sun)
+ os=-sunos4.0.2
+ ;;
+ m68000-sun)
+ os=-sunos3
+ ;;
+ m68*-cisco)
+ os=-aout
+ ;;
+ mep-*)
+ os=-elf
+ ;;
+ mips*-cisco)
+ os=-elf
+ ;;
+ mips*-*)
+ os=-elf
+ ;;
+ or32-*)
+ os=-coff
+ ;;
+ *-tti) # must be before sparc entry or we get the wrong os.
+ os=-sysv3
+ ;;
+ sparc-* | *-sun)
+ os=-sunos4.1.1
+ ;;
+ pru-*)
+ os=-elf
+ ;;
+ *-be)
+ os=-beos
+ ;;
+ *-ibm)
+ os=-aix
+ ;;
+ *-knuth)
+ os=-mmixware
+ ;;
+ *-wec)
+ os=-proelf
+ ;;
+ *-winbond)
+ os=-proelf
+ ;;
+ *-oki)
+ os=-proelf
+ ;;
+ *-hp)
+ os=-hpux
+ ;;
+ *-hitachi)
+ os=-hiux
+ ;;
+ i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+ os=-sysv
+ ;;
+ *-cbm)
+ os=-amigaos
+ ;;
+ *-dg)
+ os=-dgux
+ ;;
+ *-dolphin)
+ os=-sysv3
+ ;;
+ m68k-ccur)
+ os=-rtu
+ ;;
+ m88k-omron*)
+ os=-luna
+ ;;
+ *-next)
+ os=-nextstep
+ ;;
+ *-sequent)
+ os=-ptx
+ ;;
+ *-crds)
+ os=-unos
+ ;;
+ *-ns)
+ os=-genix
+ ;;
+ i370-*)
+ os=-mvs
+ ;;
+ *-gould)
+ os=-sysv
+ ;;
+ *-highlevel)
+ os=-bsd
+ ;;
+ *-encore)
+ os=-bsd
+ ;;
+ *-sgi)
+ os=-irix
+ ;;
+ *-siemens)
+ os=-sysv4
+ ;;
+ *-masscomp)
+ os=-rtu
+ ;;
+ f30[01]-fujitsu | f700-fujitsu)
+ os=-uxpv
+ ;;
+ *-rom68k)
+ os=-coff
+ ;;
+ *-*bug)
+ os=-coff
+ ;;
+ *-apple)
+ os=-macos
+ ;;
+ *-atari*)
+ os=-mint
+ ;;
+ *)
+ os=-none
+ ;;
+esac
+fi
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer. We pick the logical manufacturer.
+vendor=unknown
+case $basic_machine in
+ *-unknown)
+ case $os in
+ -riscix*)
+ vendor=acorn
+ ;;
+ -sunos*)
+ vendor=sun
+ ;;
+ -cnk*|-aix*)
+ vendor=ibm
+ ;;
+ -beos*)
+ vendor=be
+ ;;
+ -hpux*)
+ vendor=hp
+ ;;
+ -mpeix*)
+ vendor=hp
+ ;;
+ -hiux*)
+ vendor=hitachi
+ ;;
+ -unos*)
+ vendor=crds
+ ;;
+ -dgux*)
+ vendor=dg
+ ;;
+ -luna*)
+ vendor=omron
+ ;;
+ -genix*)
+ vendor=ns
+ ;;
+ -mvs* | -opened*)
+ vendor=ibm
+ ;;
+ -os400*)
+ vendor=ibm
+ ;;
+ -ptx*)
+ vendor=sequent
+ ;;
+ -tpf*)
+ vendor=ibm
+ ;;
+ -vxsim* | -vxworks* | -windiss*)
+ vendor=wrs
+ ;;
+ -aux*)
+ vendor=apple
+ ;;
+ -hms*)
+ vendor=hitachi
+ ;;
+ -mpw* | -macos*)
+ vendor=apple
+ ;;
+ -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+ vendor=atari
+ ;;
+ -vos*)
+ vendor=stratus
+ ;;
+ esac
+ basic_machine=`echo "$basic_machine" | sed "s/unknown/$vendor/"`
+ ;;
+esac
+
+echo "$basic_machine$os"
+exit
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/libcody/build-aux/install-sh b/libcody/build-aux/install-sh
new file mode 100755
index 0000000..8175c64
--- /dev/null
+++ b/libcody/build-aux/install-sh
@@ -0,0 +1,518 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2018-03-11.20; # UTC
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# 'make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+tab=' '
+nl='
+'
+IFS=" $tab$nl"
+
+# Set DOITPROG to "echo" to test this script.
+
+doit=${DOITPROG-}
+doit_exec=${doit:-exec}
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+chgrpprog=${CHGRPPROG-chgrp}
+chmodprog=${CHMODPROG-chmod}
+chownprog=${CHOWNPROG-chown}
+cmpprog=${CMPPROG-cmp}
+cpprog=${CPPROG-cp}
+mkdirprog=${MKDIRPROG-mkdir}
+mvprog=${MVPROG-mv}
+rmprog=${RMPROG-rm}
+stripprog=${STRIPPROG-strip}
+
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+chgrpcmd=
+chmodcmd=$chmodprog
+chowncmd=
+mvcmd=$mvprog
+rmcmd="$rmprog -f"
+stripcmd=
+
+src=
+dst=
+dir_arg=
+dst_arg=
+
+copy_on_change=false
+is_target_a_directory=possibly
+
+usage="\
+Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+ or: $0 [OPTION]... SRCFILES... DIRECTORY
+ or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+ or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+ --help display this help and exit.
+ --version display version info and exit.
+
+ -c (ignored)
+ -C install only if different (preserve the last data modification time)
+ -d create directories instead of installing files.
+ -g GROUP $chgrpprog installed files to GROUP.
+ -m MODE $chmodprog installed files to MODE.
+ -o USER $chownprog installed files to USER.
+ -s $stripprog installed files.
+ -t DIRECTORY install into DIRECTORY.
+ -T report an error if DSTFILE is a directory.
+
+Environment variables override the default commands:
+ CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
+ RMPROG STRIPPROG
+"
+
+while test $# -ne 0; do
+ case $1 in
+ -c) ;;
+
+ -C) copy_on_change=true;;
+
+ -d) dir_arg=true;;
+
+ -g) chgrpcmd="$chgrpprog $2"
+ shift;;
+
+ --help) echo "$usage"; exit $?;;
+
+ -m) mode=$2
+ case $mode in
+ *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*)
+ echo "$0: invalid mode: $mode" >&2
+ exit 1;;
+ esac
+ shift;;
+
+ -o) chowncmd="$chownprog $2"
+ shift;;
+
+ -s) stripcmd=$stripprog;;
+
+ -t)
+ is_target_a_directory=always
+ dst_arg=$2
+ # Protect names problematic for 'test' and other utilities.
+ case $dst_arg in
+ -* | [=\(\)!]) dst_arg=./$dst_arg;;
+ esac
+ shift;;
+
+ -T) is_target_a_directory=never;;
+
+ --version) echo "$0 $scriptversion"; exit $?;;
+
+ --) shift
+ break;;
+
+ -*) echo "$0: invalid option: $1" >&2
+ exit 1;;
+
+ *) break;;
+ esac
+ shift
+done
+
+# We allow the use of options -d and -T together, by making -d
+# take the precedence; this is for compatibility with GNU install.
+
+if test -n "$dir_arg"; then
+ if test -n "$dst_arg"; then
+ echo "$0: target directory not allowed when installing a directory." >&2
+ exit 1
+ fi
+fi
+
+if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
+ # When -d is used, all remaining arguments are directories to create.
+ # When -t is used, the destination is already specified.
+ # Otherwise, the last argument is the destination. Remove it from $@.
+ for arg
+ do
+ if test -n "$dst_arg"; then
+ # $@ is not empty: it contains at least $arg.
+ set fnord "$@" "$dst_arg"
+ shift # fnord
+ fi
+ shift # arg
+ dst_arg=$arg
+ # Protect names problematic for 'test' and other utilities.
+ case $dst_arg in
+ -* | [=\(\)!]) dst_arg=./$dst_arg;;
+ esac
+ done
+fi
+
+if test $# -eq 0; then
+ if test -z "$dir_arg"; then
+ echo "$0: no input file specified." >&2
+ exit 1
+ fi
+ # It's OK to call 'install-sh -d' without argument.
+ # This can happen when creating conditional directories.
+ exit 0
+fi
+
+if test -z "$dir_arg"; then
+ if test $# -gt 1 || test "$is_target_a_directory" = always; then
+ if test ! -d "$dst_arg"; then
+ echo "$0: $dst_arg: Is not a directory." >&2
+ exit 1
+ fi
+ fi
+fi
+
+if test -z "$dir_arg"; then
+ do_exit='(exit $ret); exit $ret'
+ trap "ret=129; $do_exit" 1
+ trap "ret=130; $do_exit" 2
+ trap "ret=141; $do_exit" 13
+ trap "ret=143; $do_exit" 15
+
+ # Set umask so as not to create temps with too-generous modes.
+ # However, 'strip' requires both read and write access to temps.
+ case $mode in
+ # Optimize common cases.
+ *644) cp_umask=133;;
+ *755) cp_umask=22;;
+
+ *[0-7])
+ if test -z "$stripcmd"; then
+ u_plus_rw=
+ else
+ u_plus_rw='% 200'
+ fi
+ cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+ *)
+ if test -z "$stripcmd"; then
+ u_plus_rw=
+ else
+ u_plus_rw=,u+rw
+ fi
+ cp_umask=$mode$u_plus_rw;;
+ esac
+fi
+
+for src
+do
+ # Protect names problematic for 'test' and other utilities.
+ case $src in
+ -* | [=\(\)!]) src=./$src;;
+ esac
+
+ if test -n "$dir_arg"; then
+ dst=$src
+ dstdir=$dst
+ test -d "$dstdir"
+ dstdir_status=$?
+ else
+
+ # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+ # might cause directories to be created, which would be especially bad
+ # if $src (and thus $dsttmp) contains '*'.
+ if test ! -f "$src" && test ! -d "$src"; then
+ echo "$0: $src does not exist." >&2
+ exit 1
+ fi
+
+ if test -z "$dst_arg"; then
+ echo "$0: no destination specified." >&2
+ exit 1
+ fi
+ dst=$dst_arg
+
+ # If destination is a directory, append the input filename.
+ if test -d "$dst"; then
+ if test "$is_target_a_directory" = never; then
+ echo "$0: $dst_arg: Is a directory" >&2
+ exit 1
+ fi
+ dstdir=$dst
+ dstbase=`basename "$src"`
+ case $dst in
+ */) dst=$dst$dstbase;;
+ *) dst=$dst/$dstbase;;
+ esac
+ dstdir_status=0
+ else
+ dstdir=`dirname "$dst"`
+ test -d "$dstdir"
+ dstdir_status=$?
+ fi
+ fi
+
+ case $dstdir in
+ */) dstdirslash=$dstdir;;
+ *) dstdirslash=$dstdir/;;
+ esac
+
+ obsolete_mkdir_used=false
+
+ if test $dstdir_status != 0; then
+ case $posix_mkdir in
+ '')
+ # Create intermediate dirs using mode 755 as modified by the umask.
+ # This is like FreeBSD 'install' as of 1997-10-28.
+ umask=`umask`
+ case $stripcmd.$umask in
+ # Optimize common cases.
+ *[2367][2367]) mkdir_umask=$umask;;
+ .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
+
+ *[0-7])
+ mkdir_umask=`expr $umask + 22 \
+ - $umask % 100 % 40 + $umask % 20 \
+ - $umask % 10 % 4 + $umask % 2
+ `;;
+ *) mkdir_umask=$umask,go-w;;
+ esac
+
+ # With -d, create the new directory with the user-specified mode.
+ # Otherwise, rely on $mkdir_umask.
+ if test -n "$dir_arg"; then
+ mkdir_mode=-m$mode
+ else
+ mkdir_mode=
+ fi
+
+ posix_mkdir=false
+ case $umask in
+ *[123567][0-7][0-7])
+ # POSIX mkdir -p sets u+wx bits regardless of umask, which
+ # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
+ ;;
+ *)
+ # Note that $RANDOM variable is not portable (e.g. dash); Use it
+ # here however when possible just to lower collision chance.
+ tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+
+ trap 'ret=$?; rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null; exit $ret' 0
+
+ # Because "mkdir -p" follows existing symlinks and we likely work
+ # directly in world-writeable /tmp, make sure that the '$tmpdir'
+ # directory is successfully created first before we actually test
+ # 'mkdir -p' feature.
+ if (umask $mkdir_umask &&
+ $mkdirprog $mkdir_mode "$tmpdir" &&
+ exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1
+ then
+ if test -z "$dir_arg" || {
+ # Check for POSIX incompatibilities with -m.
+ # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+ # other-writable bit of parent directory when it shouldn't.
+ # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+ test_tmpdir="$tmpdir/a"
+ ls_ld_tmpdir=`ls -ld "$test_tmpdir"`
+ case $ls_ld_tmpdir in
+ d????-?r-*) different_mode=700;;
+ d????-?--*) different_mode=755;;
+ *) false;;
+ esac &&
+ $mkdirprog -m$different_mode -p -- "$test_tmpdir" && {
+ ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"`
+ test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+ }
+ }
+ then posix_mkdir=:
+ fi
+ rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir"
+ else
+ # Remove any dirs left behind by ancient mkdir implementations.
+ rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null
+ fi
+ trap '' 0;;
+ esac;;
+ esac
+
+ if
+ $posix_mkdir && (
+ umask $mkdir_umask &&
+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+ )
+ then :
+ else
+
+ # The umask is ridiculous, or mkdir does not conform to POSIX,
+ # or it failed possibly due to a race condition. Create the
+ # directory the slow way, step by step, checking for races as we go.
+
+ case $dstdir in
+ /*) prefix='/';;
+ [-=\(\)!]*) prefix='./';;
+ *) prefix='';;
+ esac
+
+ oIFS=$IFS
+ IFS=/
+ set -f
+ set fnord $dstdir
+ shift
+ set +f
+ IFS=$oIFS
+
+ prefixes=
+
+ for d
+ do
+ test X"$d" = X && continue
+
+ prefix=$prefix$d
+ if test -d "$prefix"; then
+ prefixes=
+ else
+ if $posix_mkdir; then
+ (umask=$mkdir_umask &&
+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+ # Don't fail if two instances are running concurrently.
+ test -d "$prefix" || exit 1
+ else
+ case $prefix in
+ *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+ *) qprefix=$prefix;;
+ esac
+ prefixes="$prefixes '$qprefix'"
+ fi
+ fi
+ prefix=$prefix/
+ done
+
+ if test -n "$prefixes"; then
+ # Don't fail if two instances are running concurrently.
+ (umask $mkdir_umask &&
+ eval "\$doit_exec \$mkdirprog $prefixes") ||
+ test -d "$dstdir" || exit 1
+ obsolete_mkdir_used=true
+ fi
+ fi
+ fi
+
+ if test -n "$dir_arg"; then
+ { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+ { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+ else
+
+ # Make a couple of temp file names in the proper directory.
+ dsttmp=${dstdirslash}_inst.$$_
+ rmtmp=${dstdirslash}_rm.$$_
+
+ # Trap to clean up those temp files at exit.
+ trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+ # Copy the file name to the temp name.
+ (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
+
+ # and set any options; do chmod last to preserve setuid bits.
+ #
+ # If any of these fail, we abort the whole thing. If we want to
+ # ignore errors from any of these, just make sure not to ignore
+ # errors from the above "$doit $cpprog $src $dsttmp" command.
+ #
+ { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
+ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
+ { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
+ { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+ # If -C, don't bother to copy if it wouldn't change the file.
+ if $copy_on_change &&
+ old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
+ new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
+ set -f &&
+ set X $old && old=:$2:$4:$5:$6 &&
+ set X $new && new=:$2:$4:$5:$6 &&
+ set +f &&
+ test "$old" = "$new" &&
+ $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
+ then
+ rm -f "$dsttmp"
+ else
+ # Rename the file to the real destination.
+ $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
+
+ # The rename failed, perhaps because mv can't rename something else
+ # to itself, or perhaps because mv is so ancient that it does not
+ # support -f.
+ {
+ # Now remove or move aside any old file at destination location.
+ # We try this two ways since rm can't unlink itself on some
+ # systems and the destination file might be busy for other
+ # reasons. In this case, the final cleanup might fail but the new
+ # file should still install successfully.
+ {
+ test ! -f "$dst" ||
+ $doit $rmcmd -f "$dst" 2>/dev/null ||
+ { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
+ { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
+ } ||
+ { echo "$0: cannot unlink or rename $dst" >&2
+ (exit 1); exit 1
+ }
+ } &&
+
+ # Now rename the file to the real destination.
+ $doit $mvcmd "$dsttmp" "$dst"
+ }
+ fi || exit 1
+
+ trap '' 0
+ fi
+done
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC0"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/libcody/client.cc b/libcody/client.cc
new file mode 100644
index 0000000..54111b8
--- /dev/null
+++ b/libcody/client.cc
@@ -0,0 +1,336 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+// C
+#include <cerrno>
+#include <cstring>
+
+// Client code
+
+namespace Cody {
+
+// These do not need to be members
+static Packet ConnectResponse (std::vector<std::string> &words);
+static Packet PathnameResponse (std::vector<std::string> &words);
+static Packet OKResponse (std::vector<std::string> &words);
+static Packet IncludeTranslateResponse (std::vector<std::string> &words);
+
+// Must be consistently ordered with the RequestCode enum
+static Packet (*const responseTable[Detail::RC_HWM])
+ (std::vector<std::string> &) =
+ {
+ &ConnectResponse,
+ &PathnameResponse,
+ &PathnameResponse,
+ &PathnameResponse,
+ &OKResponse,
+ &IncludeTranslateResponse,
+ };
+
+Client::Client ()
+{
+ fd.from = fd.to = -1;
+}
+
+Client::Client (Client &&src)
+ : write (std::move (src.write)),
+ read (std::move (src.read)),
+ corked (std::move (src.corked)),
+ is_direct (src.is_direct),
+ is_connected (src.is_connected)
+{
+ if (is_direct)
+ server = src.server;
+ else
+ {
+ fd.from = src.fd.from;
+ fd.to = src.fd.to;
+ }
+}
+
+Client::~Client ()
+{
+}
+
+Client &Client::operator= (Client &&src)
+{
+ write = std::move (src.write);
+ read = std::move (src.read);
+ corked = std::move (src.corked);
+ is_direct = src.is_direct;
+ is_connected = src.is_connected;
+ if (is_direct)
+ server = src.server;
+ else
+ {
+ fd.from = src.fd.from;
+ fd.to = src.fd.to;
+ }
+
+ return *this;
+}
+
+int Client::CommunicateWithServer ()
+{
+ write.PrepareToWrite ();
+ read.PrepareToRead ();
+ if (IsDirect ())
+ server->DirectProcess (write, read);
+ else
+ {
+ // Write the write buffer
+ while (int e = write.Write (fd.to))
+ if (e != EAGAIN && e != EINTR)
+ return e;
+ // Read the read buffer
+ while (int e = read.Read (fd.from))
+ if (e != EAGAIN && e != EINTR)
+ return e;
+ }
+
+ return 0;
+}
+
+static Packet CommunicationError (int err)
+{
+ std::string e {u8"communication error:"};
+ e.append (strerror (err));
+
+ return Packet (Client::PC_ERROR, std::move (e));
+}
+
+Packet Client::ProcessResponse (std::vector<std::string> &words,
+ unsigned code, bool isLast)
+{
+ if (int e = read.Lex (words))
+ {
+ if (e == EINVAL)
+ {
+ std::string msg (u8"malformed string '");
+ msg.append (words[0]);
+ msg.append (u8"'");
+ return Packet (Client::PC_ERROR, std::move (msg));
+ }
+ else
+ return Packet (Client::PC_ERROR, u8"missing response");
+ }
+
+ Assert (!words.empty ());
+ if (words[0] == u8"ERROR")
+ return Packet (Client::PC_ERROR,
+ std::move (words.size () == 2 ? words[1]
+ : u8"malformed error response"));
+
+ if (isLast && !read.IsAtEnd ())
+ return Packet (Client::PC_ERROR,
+ std::string (u8"unexpected extra response"));
+
+ Assert (code < Detail::RC_HWM);
+ Packet result (responseTable[code] (words));
+ result.SetRequest (code);
+ if (result.GetCode () == Client::PC_ERROR && result.GetString ().empty ())
+ {
+ std::string msg {u8"malformed response '"};
+
+ read.LexedLine (msg);
+ msg.append (u8"'");
+ result.GetString () = std::move (msg);
+ }
+ else if (result.GetCode () == Client::PC_CONNECT)
+ is_connected = true;
+
+ return result;
+}
+
+Packet Client::MaybeRequest (unsigned code)
+{
+ if (IsCorked ())
+ {
+ corked.push_back (code);
+ return Packet (PC_CORKED);
+ }
+
+ if (int err = CommunicateWithServer ())
+ return CommunicationError (err);
+
+ std::vector<std::string> words;
+ return ProcessResponse(words, code, true);
+}
+
+void Client::Cork ()
+{
+ if (corked.empty ())
+ corked.push_back (-1);
+}
+
+std::vector<Packet> Client::Uncork ()
+{
+ std::vector<Packet> result;
+
+ if (corked.size () > 1)
+ {
+ if (int err = CommunicateWithServer ())
+ result.emplace_back (CommunicationError (err));
+ else
+ {
+ std::vector<std::string> words;
+ for (auto iter = corked.begin () + 1; iter != corked.end ();)
+ {
+ char code = *iter;
+ ++iter;
+ result.emplace_back (ProcessResponse (words, code,
+ iter == corked.end ()));
+ }
+ }
+ }
+
+ corked.clear ();
+
+ return result;
+}
+
+// Now the individual message handlers
+
+// HELLO $vernum $agent $ident
+Packet Client::Connect (char const *agent, char const *ident,
+ size_t alen, size_t ilen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"HELLO");
+ write.AppendInteger (Version);
+ write.AppendWord (agent, true, alen);
+ write.AppendWord (ident, true, ilen);
+ write.EndLine ();
+
+ return MaybeRequest (Detail::RC_CONNECT);
+}
+
+// HELLO $version $agent [$flags]
+Packet ConnectResponse (std::vector<std::string> &words)
+{
+ if (words[0] == u8"HELLO" && (words.size () == 3 || words.size () == 4))
+ {
+ char *eptr;
+ unsigned long val = strtoul (words[1].c_str (), &eptr, 10);
+
+ unsigned version = unsigned (val);
+ if (*eptr || version != val || version < Version)
+ return Packet (Client::PC_ERROR, u8"incompatible version");
+ else
+ {
+ unsigned flags = 0;
+ if (words.size () == 4)
+ {
+ val = strtoul (words[3].c_str (), &eptr, 10);
+ flags = unsigned (val);
+ }
+ return Packet (Client::PC_CONNECT, flags);
+ }
+ }
+
+ return Packet (Client::PC_ERROR, u8"");
+}
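// (Editorial illustration, not part of this file.)  A successful
// handshake, as built by Connect above and parsed by ConnectResponse,
// is a single line each way; the agent/ident values are placeholders:
//
//   client -> server:  HELLO 1 example-agent example-ident
//   server -> client:  HELLO 1 example-server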
+
+// MODULE-REPO
+Packet Client::ModuleRepo ()
+{
+ write.BeginLine ();
+ write.AppendWord (u8"MODULE-REPO");
+ write.EndLine ();
+
+ return MaybeRequest (Detail::RC_MODULE_REPO);
+}
+
+// PATHNAME $dir | ERROR
+Packet PathnameResponse (std::vector<std::string> &words)
+{
+ if (words[0] == u8"PATHNAME" && words.size () == 2)
+ return Packet (Client::PC_PATHNAME, std::move (words[1]));
+
+ return Packet (Client::PC_ERROR, u8"");
+}
+
+// OK or ERROR
+Packet OKResponse (std::vector<std::string> &words)
+{
+ if (words[0] == u8"OK")
+ return Packet (Client::PC_OK);
+ else
+ return Packet (Client::PC_ERROR,
+ words.size () == 2 ? std::move (words[1]) : "");
+}
+
+// MODULE-EXPORT $modulename [$flags]
+Packet Client::ModuleExport (char const *module, Flags flags, size_t mlen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"MODULE-EXPORT");
+ write.AppendWord (module, true, mlen);
+ if (flags != Flags::None)
+ write.AppendInteger (unsigned (flags));
+ write.EndLine ();
+
+ return MaybeRequest (Detail::RC_MODULE_EXPORT);
+}
+
+// MODULE-IMPORT $modulename [$flags]
+Packet Client::ModuleImport (char const *module, Flags flags, size_t mlen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"MODULE-IMPORT");
+ write.AppendWord (module, true, mlen);
+ if (flags != Flags::None)
+ write.AppendInteger (unsigned (flags));
+ write.EndLine ();
+
+ return MaybeRequest (Detail::RC_MODULE_IMPORT);
+}
+
+// MODULE-COMPILED $modulename [$flags]
+Packet Client::ModuleCompiled (char const *module, Flags flags, size_t mlen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"MODULE-COMPILED");
+ write.AppendWord (module, true, mlen);
+ if (flags != Flags::None)
+ write.AppendInteger (unsigned (flags));
+ write.EndLine ();
+
+ return MaybeRequest (Detail::RC_MODULE_COMPILED);
+}
+
+// INCLUDE-TRANSLATE $includename [$flags]
+Packet Client::IncludeTranslate (char const *include, Flags flags, size_t ilen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"INCLUDE-TRANSLATE");
+ write.AppendWord (include, true, ilen);
+ if (flags != Flags::None)
+ write.AppendInteger (unsigned (flags));
+ write.EndLine ();
+
+ return MaybeRequest (Detail::RC_INCLUDE_TRANSLATE);
+}
+
+// BOOL $knowntextualness
+// PATHNAME $cmifile
+Packet IncludeTranslateResponse (std::vector<std::string> &words)
+{
+ if (words[0] == u8"BOOL" && words.size () == 2)
+ {
+ if (words[1] == u8"FALSE")
+ return Packet (Client::PC_BOOL, 0);
+ else if (words[1] == u8"TRUE")
+ return Packet (Client::PC_BOOL, 1);
+ else
+ return Packet (Client::PC_ERROR, u8"");
+ }
+ else
+ return PathnameResponse (words);
+}
+
+}
+
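Editorial note (not part of this commit): the request entry points above each
emit one protocol line, and MaybeRequest either flushes it immediately or
queues it while the connection is corked.  The sketch below only illustrates
how a compiler-side caller might drive this API over an already-established
pipe or socket pair; the file descriptors, agent string and module names are
placeholders.

    // Minimal usage sketch against the Client API declared in cody.hh.
    #include "cody.hh"

    int BatchedRequests (int from_server, int to_server)
    {
      Cody::Client client (from_server, to_server);

      // HELLO handshake; an error packet carries a message string.
      Cody::Packet hello = client.Connect ("example-agent", "example-ident");
      if (hello.GetCode () == Cody::Client::PC_ERROR)
        return 1;

      // Queue several requests, then flush them as a single block.
      client.Cork ();
      client.ModuleExport ("example.module");  // returns PC_CORKED for now
      client.ModuleImport ("other.module");    // likewise
      std::vector<Cody::Packet> replies = client.Uncork ();

      for (auto &reply : replies)
        if (reply.GetCode () == Cody::Client::PC_ERROR)
          return 1;  // reply.GetString () holds the error text

      return 0;
    }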
diff --git a/libcody/cmake/libcody-config-ix.cmake b/libcody/cmake/libcody-config-ix.cmake
new file mode 100644
index 0000000..0606e85
--- /dev/null
+++ b/libcody/cmake/libcody-config-ix.cmake
@@ -0,0 +1,43 @@
+# message(STATUS "*top config-ix* CMAKE_SYSTEM : ${CMAKE_SYSTEM}")
+
+include(CheckIncludeFile)
+include(CheckIncludeFileCXX)
+#include(CheckLibraryExists)
+#include(CheckSymbolExists)
+include(CheckFunctionExists)
+#include(CheckCXXSourceCompiles)
+#include(TestBigEndian)
+include(CheckCCompilerFlag)
+include(CheckCXXCompilerFlag)
+
+# Flags
+
+check_cxx_compiler_flag(-stdlib=libc++ LIBCODY_CXX_HAS_STDLIB_FLAG)
+
+check_cxx_compiler_flag(-fno-enforce-eh-specs LIBCODY_HAS_NOENFORCE)
+check_cxx_compiler_flag(-fno-stack-protector LIBCODY_HAS_NOSTACKPROT)
+check_cxx_compiler_flag(-fno-threadsafe-statics LIBCODY_HAS_NOTHREADSAFESTATICS)
+
+check_cxx_compiler_flag(-Wno-gnu-zero-variadic-macro-arguments LIBCODY_CXX_W_GZVMA)
+
+# Address github issue #10
+if (NOT CODY_WITHEXCEPTIONS)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions -fno-rtti")
+ if (LIBCODY_HAS_NOENFORCE)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-enforce-eh-specs")
+ endif()
+endif()
+
+if (LIBCODY_HAS_NOSTACKPROT)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-stack-protector")
+endif()
+if (LIBCODY_HAS_NOTHREADSAFESTATICS)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-threadsafe-statics")
+endif()
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W -Wall -Woverloaded-virtual -Wshadow")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-invalid-offsetof -Wno-unused-variable")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-missing-field-initializers")
+if (LIBCODY_CXX_W_GZVMA)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-gnu-zero-variadic-macro-arguments")
+endif ()
diff --git a/libcody/cody.hh b/libcody/cody.hh
new file mode 100644
index 0000000..31d9c18
--- /dev/null
+++ b/libcody/cody.hh
@@ -0,0 +1,800 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+#ifndef CODY_HH
+#define CODY_HH 1
+
+// Have a known-good list of networking systems
+#if defined (__unix__) || defined (__MACH__)
+#define CODY_NETWORKING 1
+#else
+#define CODY_NETWORKING 0
+#endif
+#if 0 // For testing
+#undef CODY_NETWORKING
+#define CODY_NETWORKING 0
+#endif
+
+// C++
+#include <memory>
+#include <string>
+#include <vector>
+// C
+#include <cstddef>
+// OS
+#include <errno.h>
+#include <sys/types.h>
+#if CODY_NETWORKING
+#include <sys/socket.h>
+#endif
+
+namespace Cody {
+
+// Set version to 1, as this is completely incompatible with 0.
+// Fortunately both versions 0 and 1 will recognize each other's HELLO
+// messages sufficiently to error out
+constexpr unsigned Version = 1;
+
+// FIXME: I guess we need a file-handle abstraction here
+// Is Windows still DWORDPTR, or should it be FILE *? (ew).
+
+namespace Detail {
+
+// C++11 doesn't have utf8 character literals :(
+
+template<unsigned I>
+constexpr char S2C (char const (&s)[I])
+{
+ static_assert (I == 2, "only single octet strings may be converted");
+ return s[0];
+}
+
+/// Internal buffering class. Used to concatenate outgoing messages
+/// and Lex incoming ones.
+class MessageBuffer
+{
+ std::vector<char> buffer; ///< buffer holding the message
+ size_t lastBol = 0; ///< location of the most recent Beginning Of
+                        ///< Line, or the position we've read up to when writing
+
+public:
+ MessageBuffer () = default;
+ ~MessageBuffer () = default;
+ MessageBuffer (MessageBuffer &&) = default;
+ MessageBuffer &operator= (MessageBuffer &&) = default;
+
+public:
+ ///
+ /// Finalize a buffer to be written. No more lines can be added to
+ /// the buffer. Use before a sequence of Write calls.
+ void PrepareToWrite ()
+ {
+ buffer.push_back (u8"\n"[0]);
+ lastBol = 0;
+ }
+ ///
+ /// Prepare a buffer for reading. Use before a sequence of Read calls.
+ void PrepareToRead ()
+ {
+ buffer.clear ();
+ lastBol = 0;
+ }
+
+public:
+ /// Begin a message line. Use before a sequence of Append and
+ /// related calls.
+ void BeginLine ();
+ /// End a message line. Use after a sequence of Append and related calls.
+ void EndLine () {}
+
+public:
+ /// Append a string to the current line. No whitespace is prepended
+ /// or appended.
+ ///
+ /// @param str the string to be written
+ /// @param maybe_quote indicate if there's a possibility the string
+ /// contains characters that need quoting. Defaults to false.
+ /// It is always safe to set
+ /// this true, but that causes an additional scan of the string.
+ /// @param len The length of the string. If not specified, strlen
+ /// is used to find the length.
+ void Append (char const *str, bool maybe_quote = false,
+ size_t len = ~size_t (0));
+
+ ///
+ /// Add whitespace word separator. Multiple adjacent whitespace is fine.
+ void Space ()
+ {
+ Append (Detail::S2C(u8" "));
+ }
+
+public:
+ /// Add a word as with Append, but prefixing whitespace to make a
+ /// separate word
+ void AppendWord (char const *str, bool maybe_quote = false,
+ size_t len = ~size_t (0))
+ {
+ if (buffer.size () != lastBol)
+ Space ();
+ Append (str, maybe_quote, len);
+ }
+ /// Add a word as with AppendWord
+ /// @param str the string to append
+ /// @param maybe_quote string might need quoting, as for Append
+ void AppendWord (std::string const &str, bool maybe_quote = false)
+ {
+ AppendWord (str.data (), maybe_quote, str.size ());
+ }
+ ///
+ /// Add an integral value, prepending a space.
+ void AppendInteger (unsigned u);
+
+private:
+ /// Append a literal character.
+ /// @param c character to append
+ void Append (char c);
+
+public:
+ /// Lex the next input line into a vector of words.
+ /// @param words filled with a vector of lexed strings
+  /// @result 0 if no errors, an errno value on lexing error such as
+ /// there being no next line (ENOENT), or malformed quoting (EINVAL)
+ int Lex (std::vector<std::string> &words);
+
+public:
+  /// Append the most-recently lexed line to a string.  May be useful
+ /// in error messages. The unparsed line is appended -- before any
+ /// unquoting.
+ /// If we had c++17 string_view, we'd simply return a view of the
+ /// line, and leave it to the caller to do any concatenation.
+  /// @param l string to which the lexed line is appended.
+ void LexedLine (std::string &l);
+
+public:
+ /// Detect if we have reached the end of the input buffer.
+ /// I.e. there are no more lines to Lex
+ /// @result True if at end
+ bool IsAtEnd () const
+ {
+ return lastBol == buffer.size ();
+ }
+
+public:
+  /// Read from the end point into the read buffer, as with read(2).
+  /// This will not block unless the FD is blocking and there is
+  /// nothing immediately available.
+ /// @param fd file descriptor to read from. This may be a regular
+ /// file, pipe or socket.
+ /// @result on error returns errno. If end of file occurs, returns
+ /// -1. At end of message returns 0. If there is more needed
+ /// returns EAGAIN (or possibly EINTR). If the message is
+ /// malformed, returns EINVAL.
+ int Read (int fd) noexcept;
+
+public:
+ /// Write to an end point from a write buffer, as with write(2). As
+ /// with Read, this will not usually block.
+ /// @param fd file descriptor to write to. This may be a regular
+ /// file, pipe or socket.
+ /// @result on error returns errno.
+ /// At end of message returns 0. If there is more to write
+ /// returns EAGAIN (or possibly EINTR).
+ int Write (int fd) noexcept;
+};
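// (Editorial sketch, not part of this header.)  The intended round trip
// over this buffer, with a placeholder fd and a placeholder Process()
// per-line handler.  Writing: build lines, seal, pump.  Reading: fill,
// then lex line by line.
//
//   Cody::Detail::MessageBuffer out;
//   out.BeginLine ();
//   out.AppendWord (u8"HELLO");
//   out.AppendInteger (Cody::Version);
//   out.EndLine ();
//   out.PrepareToWrite ();
//   while (int err = out.Write (fd))
//     if (err != EAGAIN && err != EINTR)
//       break;  // hard error
//
//   Cody::Detail::MessageBuffer in;
//   in.PrepareToRead ();
//   while (int err = in.Read (fd))
//     if (err != EAGAIN && err != EINTR)
//       break;  // hard error, or -1 at end of file
//
//   std::vector<std::string> words;
//   while (!in.IsAtEnd () && !in.Lex (words))
//     Process (words);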
+
+///
+/// Request codes. Perhaps this should be exposed? These are likely
+/// useful to servers that queue requests.
+enum RequestCode
+{
+ RC_CONNECT,
+ RC_MODULE_REPO,
+ RC_MODULE_EXPORT,
+ RC_MODULE_IMPORT,
+ RC_MODULE_COMPILED,
+ RC_INCLUDE_TRANSLATE,
+ RC_HWM
+};
+
+/// Internal file descriptor tuple. It's used as an anonymous union member.
+struct FD
+{
+ int from; ///< Read from this FD
+ int to; ///< Write to this FD
+};
+
+}
+
+// Flags for various requests
+enum class Flags : unsigned
+{
+ None,
+ NameOnly = 1<<0, // Only querying for CMI names, not contents
+};
+
+inline Flags operator& (Flags a, Flags b)
+{
+ return Flags (unsigned (a) & unsigned (b));
+}
+inline Flags operator| (Flags a, Flags b)
+{
+ return Flags (unsigned (a) | unsigned (b));
+}
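// (Editorial sketch, not part of this header.)  Flags are passed to the
// request calls declared below; e.g. a name-only CMI query on some
// Client object "client" (a placeholder variable):
//
//   client.ModuleImport ("example.module", Cody::Flags::NameOnly);
//
// The operators above allow combining and testing flag sets once more
// than one flag exists.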
+
+///
+/// Response data for a request. Returned by Client's request calls,
+/// which return a single Packet. When the connection is Corked, the
+/// Uncork call will return a vector of Packets.
+class Packet
+{
+public:
+ ///
+ /// Packet is a variant structure. These are the possible content types.
+ enum Category { INTEGER, STRING, VECTOR};
+
+private:
+  // std::variant is a C++17 thing, so we're doing this ourselves.
+ union
+ {
+ size_t integer; ///< Integral value
+ std::string string; ///< String value
+ std::vector<std::string> vector; ///< Vector of string value
+ };
+ Category cat : 2; ///< Discriminator
+
+private:
+ unsigned short code = 0; ///< Packet type
+ unsigned short request = 0;
+
+public:
+ Packet (unsigned c, size_t i = 0)
+ : integer (i), cat (INTEGER), code (c)
+ {
+ }
+ Packet (unsigned c, std::string &&s)
+ : string (std::move (s)), cat (STRING), code (c)
+ {
+ }
+ Packet (unsigned c, std::string const &s)
+ : string (s), cat (STRING), code (c)
+ {
+ }
+ Packet (unsigned c, std::vector<std::string> &&v)
+ : vector (std::move (v)), cat (VECTOR), code (c)
+ {
+ }
+ // No non-move constructor from a vector. You should not be doing
+ // that.
+
+ // Only move constructor and move assignment
+ Packet (Packet &&t)
+ {
+ Create (std::move (t));
+ }
+ Packet &operator= (Packet &&t)
+ {
+ Destroy ();
+ Create (std::move (t));
+
+ return *this;
+ }
+ ~Packet ()
+ {
+ Destroy ();
+ }
+
+private:
+ ///
+ /// Variant move creation from another packet
+ void Create (Packet &&t);
+ ///
+ /// Variant destruction
+ void Destroy ();
+
+public:
+ ///
+ /// Return the packet type
+ unsigned GetCode () const
+ {
+ return code;
+ }
+ ///
+ /// Return the packet type
+ unsigned GetRequest () const
+ {
+ return request;
+ }
+ void SetRequest (unsigned r)
+ {
+ request = r;
+ }
+ ///
+ /// Return the category of the packet's payload
+ Category GetCategory () const
+ {
+ return cat;
+ }
+
+public:
+ ///
+ /// Return an integral payload. Undefined if the category is not INTEGER
+ size_t GetInteger () const
+ {
+ return integer;
+ }
+ ///
+ /// Return (a reference to) a string payload. Undefined if the
+ /// category is not STRING
+ std::string const &GetString () const
+ {
+ return string;
+ }
+ std::string &GetString ()
+ {
+ return string;
+ }
+ ///
+ /// Return (a reference to) a constant vector of strings payload.
+ /// Undefined if the category is not VECTOR
+ std::vector<std::string> const &GetVector () const
+ {
+ return vector;
+ }
+ ///
+  /// Return (a reference to) a non-constant vector of strings payload.
+ /// Undefined if the category is not VECTOR
+ std::vector<std::string> &GetVector ()
+ {
+ return vector;
+ }
+};
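// (Editorial sketch, not part of this header.)  The accessors above are
// undefined for the wrong category, so consumers switch on the payload
// category first; assumes <cstdio> for the illustrative printf calls.
//
//   void DumpPacket (Cody::Packet const &p)
//   {
//     switch (p.GetCategory ())
//       {
//       case Cody::Packet::INTEGER:
//         printf ("%zu\n", p.GetInteger ());
//         break;
//       case Cody::Packet::STRING:
//         printf ("%s\n", p.GetString ().c_str ());
//         break;
//       case Cody::Packet::VECTOR:
//         for (auto const &word : p.GetVector ())
//           printf ("%s\n", word.c_str ());
//         break;
//       }
//   }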
+
+class Server;
+
+///
+/// Client-side (compiler) object.
+class Client
+{
+public:
+ /// Response packet codes
+ enum PacketCode
+ {
+ PC_CORKED, ///< Messages are corked
+ PC_CONNECT, ///< Packet is integer version
+ PC_ERROR, ///< Packet is error string
+ PC_OK,
+ PC_BOOL,
+ PC_PATHNAME
+ };
+
+private:
+ Detail::MessageBuffer write; ///< Outgoing write buffer
+ Detail::MessageBuffer read; ///< Incoming read buffer
+ std::string corked; ///< Queued request tags
+ union
+ {
+ Detail::FD fd; ///< FDs connecting to server
+ Server *server; ///< Directly connected server
+ };
+ bool is_direct = false; ///< Discriminator
+  bool is_connected = false; ///< Connection handshake successful
+
+private:
+ Client ();
+public:
+ /// Direct connection constructor.
+ /// @param s Server to directly connect
+ Client (Server *s)
+ : Client ()
+ {
+ is_direct = true;
+ server = s;
+ }
+ /// Communication connection constructor
+ /// @param from file descriptor to read from
+ /// @param to file descriptor to write to, defaults to from
+ Client (int from, int to = -1)
+ : Client ()
+ {
+ fd.from = from;
+ fd.to = to < 0 ? from : to;
+ }
+ ~Client ();
+ // We have to provide our own move variants, because of the variant member.
+ Client (Client &&);
+ Client &operator= (Client &&);
+
+public:
+ ///
+ /// Direct connection predicate
+ bool IsDirect () const
+ {
+ return is_direct;
+ }
+ ///
+ /// Successful handshake predicate
+ bool IsConnected () const
+ {
+ return is_connected;
+ }
+
+public:
+ ///
+ /// Get the read FD
+ /// @result the FD to read from, -1 if a direct connection
+ int GetFDRead () const
+ {
+ return is_direct ? -1 : fd.from;
+ }
+ ///
+ /// Get the write FD
+ /// @result the FD to write to, -1 if a direct connection
+ int GetFDWrite () const
+ {
+ return is_direct ? -1 : fd.to;
+ }
+ ///
+ /// Get the directly-connected server
+ /// @result the server, or nullptr if a communication connection
+ Server *GetServer () const
+ {
+ return is_direct ? server : nullptr;
+ }
+
+public:
+ ///
+  /// Perform the connection handshake.  All other requests will result
+  /// in errors until the handshake is successful.
+ /// @param agent compiler identification
+  /// @param ident compilation identification (may be nullptr)
+ /// @param alen length of agent string, if known
+ /// @param ilen length of ident string, if known
+  /// @result packet indicating success (or deferment) of the
+ /// connection, payload is optional flags
+ Packet Connect (char const *agent, char const *ident,
+ size_t alen = ~size_t (0), size_t ilen = ~size_t (0));
+ /// std::string wrapper for connection
+ /// @param agent compiler identification
+ /// @param ident compilation identification
+ Packet Connect (std::string const &agent, std::string const &ident)
+ {
+ return Connect (agent.c_str (), ident.c_str (),
+ agent.size (), ident.size ());
+ }
+
+public:
+ /// Request compiler module repository
+ /// @result packet indicating repo
+ Packet ModuleRepo ();
+
+public:
+ /// Inform of compilation of a named module interface or partition,
+ /// or a header unit
+ /// @param str module or header-unit
+ /// @param len name length, if known
+  /// @result CMI name (or deferment/error)
+ Packet ModuleExport (char const *str, Flags flags, size_t len = ~size_t (0));
+
+ Packet ModuleExport (char const *str)
+ {
+ return ModuleExport (str, Flags::None, ~size_t (0));
+ }
+ Packet ModuleExport (std::string const &s, Flags flags = Flags::None)
+ {
+ return ModuleExport (s.c_str (), flags, s.size ());
+ }
+
+public:
+ /// Importation of a module, partition or header-unit
+ /// @param str module or header-unit
+ /// @param len name length, if known
+  /// @result CMI name (or deferment/error)
+ Packet ModuleImport (char const *str, Flags flags, size_t len = ~size_t (0));
+
+ Packet ModuleImport (char const *str)
+ {
+ return ModuleImport (str, Flags::None, ~size_t (0));
+ }
+ Packet ModuleImport (std::string const &s, Flags flags = Flags::None)
+ {
+ return ModuleImport (s.c_str (), flags, s.size ());
+ }
+
+public:
+ /// Successful compilation of a module interface, partition or
+  /// header-unit.  Must have been preceded by a ModuleExport
+ /// request.
+ /// @param str module or header-unit
+ /// @param len name length, if known
+ /// @result OK (or deferment/error)
+ Packet ModuleCompiled (char const *str, Flags flags, size_t len = ~size_t (0));
+
+ Packet ModuleCompiled (char const *str)
+ {
+ return ModuleCompiled (str, Flags::None, ~size_t (0));
+ }
+ Packet ModuleCompiled (std::string const &s, Flags flags = Flags::None)
+ {
+ return ModuleCompiled (s.c_str (), flags, s.size ());
+ }
+
+ /// Include translation query.
+ /// @param str header unit name
+ /// @param len name length, if known
+ /// @result Packet indicating include translation boolean, or CMI
+ /// name (or deferment/error)
+ Packet IncludeTranslate (char const *str, Flags flags,
+ size_t len = ~size_t (0));
+
+ Packet IncludeTranslate (char const *str)
+ {
+ return IncludeTranslate (str, Flags::None, ~size_t (0));
+ }
+ Packet IncludeTranslate (std::string const &s, Flags flags = Flags::None)
+ {
+ return IncludeTranslate (s.c_str (), flags, s.size ());
+ }
+
+public:
+ /// Cork the connection. All requests are queued up. Each request
+ /// call will return a PC_CORKED packet.
+ void Cork ();
+
+ /// Uncork the connection. All queued requests are sent to the
+ /// server, and a block of responses waited for.
+ /// @result A vector of packets, containing the in-order responses to the
+ /// queued requests.
+ std::vector<Packet> Uncork ();
+ ///
+  /// Indicate whether the connection is corked
+ bool IsCorked () const
+ {
+ return !corked.empty ();
+ }
+
+private:
+ Packet ProcessResponse (std::vector<std::string> &, unsigned code,
+ bool isLast);
+ Packet MaybeRequest (unsigned code);
+ int CommunicateWithServer ();
+};
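A minimal usage sketch for the request methods declared above. It assumes a Cody::Client constructor taking read/write file descriptors (analogous to the Server constructor later in this header); that constructor is not visible in this hunk.

  // Sketch only.  Assumes Cody::Client (int from, int to); responses
  // would normally be inspected, which is omitted here.
  #include "cody.hh"
  #include <vector>

  int main ()
  {
    Cody::Client client (0, 1);         // read from stdin, write to stdout

    // Handshake first; every other request errors until it succeeds.
    client.Connect ("g++", "example.cc");

    // Cork to batch requests; Uncork sends them and returns the
    // in-order responses.
    client.Cork ();
    client.ModuleImport ("my.module");
    client.IncludeTranslate ("my/header.hh");
    std::vector<Cody::Packet> replies = client.Uncork ();

    return replies.empty () ? 1 : 0;    // expect one response per request
  }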
+
+/// This server-side class is used to resolve requests from one or
+/// more clients. You are expected to derive from it and override the
+/// virtual functions it provides. The connection resolver may return
+/// a different resolved object to service the remainder of the
+/// connection -- for instance depending on the compiler that is
+/// making the requests.
+class Resolver
+{
+public:
+ Resolver () = default;
+ virtual ~Resolver ();
+
+protected:
+ /// Mapping from a module or header-unit name to a CMI file name.
+ /// @param module module name
+ /// @result CMI name
+ virtual std::string GetCMIName (std::string const &module);
+
+ /// Return the CMI file suffix to use
+ /// @result CMI suffix, a statically allocated string
+ virtual char const *GetCMISuffix ();
+
+public:
+ /// When the requests of a directly-connected server are processed,
+ /// we may want to wait for the requests to complete (for instance a
+ /// set of subjobs).
+ /// @param s directly connected server.
+ virtual void WaitUntilReady (Server *s);
+
+public:
+ /// Provide an error response.
+ /// @param s the server to provide the response to.
+ /// @param msg the error message
+ virtual void ErrorResponse (Server *s, std::string &&msg);
+
+public:
+ /// Connection handshake. Provide response to server and return new
+ /// (or current) resolver, or nullptr.
+ /// @param s server to provide response to
+ /// @param version the client's version number
+ /// @param agent the client agent (compiler identification)
+ /// @param ident the compilation identification (may be empty)
+  /// @result nullptr in the case of an error (an error response will
+  /// be sent). If handing off to another resolver, return that;
+  /// otherwise return this.
+ virtual Resolver *ConnectRequest (Server *s, unsigned version,
+ std::string &agent, std::string &ident);
+
+public:
+  // Return 0 on success, an errno value on failure, or -1 on an unspecific error
+ virtual int ModuleRepoRequest (Server *s);
+
+ virtual int ModuleExportRequest (Server *s, Flags flags,
+ std::string &module);
+ virtual int ModuleImportRequest (Server *s, Flags flags,
+ std::string &module);
+ virtual int ModuleCompiledRequest (Server *s, Flags flags,
+ std::string &module);
+ virtual int IncludeTranslateRequest (Server *s, Flags flags,
+ std::string &include);
+};
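A sketch of a trivial Resolver subclass, using only the virtuals declared above and the PathnameResponse helper declared on Server below; the "cmi-cache/" directory is a made-up example value.

  // Sketch: answer import/export requests with a CMI pathname built
  // from the default GetCMIName mapping.  "cmi-cache/" is hypothetical.
  #include "cody.hh"
  #include <string>

  class SimpleResolver : public Cody::Resolver
  {
  public:
    int ModuleImportRequest (Cody::Server *s, Cody::Flags,
                             std::string &module) override
    {
      s->PathnameResponse ("cmi-cache/" + GetCMIName (module));
      return 0;   // 0 on success, errno on failure, -1 otherwise
    }
    int ModuleExportRequest (Cody::Server *s, Cody::Flags,
                             std::string &module) override
    {
      s->PathnameResponse ("cmi-cache/" + GetCMIName (module));
      return 0;
    }
  };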
+
+
+/// This server-side (build system) class handles a single connection
+/// to a client. It has 3 states, READING:accumulating a message
+/// block froma client, WRITING:writing a message block to a client
+/// and PROCESSING:resolving requests. If the server does not spawn
+/// jobs to build needed artifacts, the PROCESSING state will be brief.
+class Server
+{
+public:
+ enum Direction
+ {
+ READING, // Server is waiting for completion of a (set of)
+ // requests from client. The next state will be PROCESSING.
+ WRITING, // Server is writing a (set of) responses to client.
+ // The next state will be READING.
+ PROCESSING // Server is processing client request(s). The next
+ // state will be WRITING.
+ };
+
+private:
+ Detail::MessageBuffer write;
+ Detail::MessageBuffer read;
+ Resolver *resolver;
+ Detail::FD fd;
+ bool is_connected = false;
+ Direction direction : 2;
+
+public:
+ Server (Resolver *r);
+ Server (Resolver *r, int from, int to = -1)
+ : Server (r)
+ {
+ fd.from = from;
+ fd.to = to >= 0 ? to : from;
+ }
+ ~Server ();
+ Server (Server &&);
+ Server &operator= (Server &&);
+
+public:
+ bool IsConnected () const
+ {
+ return is_connected;
+ }
+
+public:
+ void SetDirection (Direction d)
+ {
+ direction = d;
+ }
+
+public:
+ Direction GetDirection () const
+ {
+ return direction;
+ }
+ int GetFDRead () const
+ {
+ return fd.from;
+ }
+ int GetFDWrite () const
+ {
+ return fd.to;
+ }
+ Resolver *GetResolver () const
+ {
+ return resolver;
+ }
+
+public:
+ /// Process requests from a directly-connected client. This is a
+ /// small wrapper around ProcessRequests, with some buffer swapping
+ /// for communication. It is expected that such processessing is
+  /// for communication. It is expected that such processing is
+ /// @param from message block from client
+ /// @param to message block to client
+ void DirectProcess (Detail::MessageBuffer &from, Detail::MessageBuffer &to);
+
+public:
+ /// Process the messages queued in the read buffer. We enter the
+ /// PROCESSING state, and each message line causes various resolver
+ /// methods to be called. Once processed, the server may need to
+ /// wait for all the requests to be ready, or it may be able to
+ /// immediately write responses back.
+ void ProcessRequests ();
+
+public:
+ /// Accumulate an error response.
+ /// @param error the error message to encode
+ /// @param elen length of error, if known
+ void ErrorResponse (char const *error, size_t elen = ~size_t (0));
+ void ErrorResponse (std::string const &error)
+ {
+ ErrorResponse (error.data (), error.size ());
+ }
+
+ /// Accumulate an OK response
+ void OKResponse ();
+
+ /// Accumulate a boolean response
+ void BoolResponse (bool);
+
+ /// Accumulate a pathname response
+ /// @param path (may be nullptr, or empty)
+  /// @param plen length, if known
+ void PathnameResponse (char const *path, size_t plen = ~size_t (0));
+ void PathnameResponse (std::string const &path)
+ {
+ PathnameResponse (path.data (), path.size ());
+ }
+
+public:
+ /// Accumulate a (successful) connection response
+ /// @param agent the server-side agent
+ /// @param alen agent length, if known
+ void ConnectResponse (char const *agent, size_t alen = ~size_t (0));
+ void ConnectResponse (std::string const &agent)
+ {
+ ConnectResponse (agent.data (), agent.size ());
+ }
+
+public:
+ /// Write message block to client. Semantics as for
+ /// MessageBuffer::Write.
+ /// @result errno or completion (0).
+ int Write ()
+ {
+ return write.Write (fd.to);
+ }
+ /// Initialize for writing a message block. All responses to the
+  /// incoming message block must be complete. Enters WRITING state.
+ void PrepareToWrite ()
+ {
+ write.PrepareToWrite ();
+ direction = WRITING;
+ }
+
+public:
+ /// Read message block from client. Semantics as for
+ /// MessageBuffer::Read.
+ /// @result errno, eof (-1) or completion (0)
+ int Read ()
+ {
+ return read.Read (fd.from);
+ }
+ /// Initialize for reading a message block. Enters READING state.
+ void PrepareToRead ()
+ {
+ read.PrepareToRead ();
+ direction = READING;
+ }
+};
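A sketch of the READING, PROCESSING, WRITING cycle described above, for a single blocking connection; the EAGAIN/EINTR retry handling is an assumption of this sketch and deliberately simplistic.

  // Sketch: serve one connected client until EOF or a hard error,
  // using only the Server members declared above.
  #include "cody.hh"
  #include <cerrno>

  void ServeOne (Cody::Server &server)
  {
    for (;;)
      {
        server.PrepareToRead ();
        int err;
        while ((err = server.Read ()) == EAGAIN || err == EINTR)
          continue;                  // retry partial/interrupted reads
        if (err)
          break;                     // -1 is EOF, otherwise an errno value

        server.ProcessRequests ();   // drives the Resolver callbacks

        server.PrepareToWrite ();
        while ((err = server.Write ()) == EAGAIN || err == EINTR)
          continue;
        if (err)
          break;
      }
  }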
+
+// Helper network stuff
+
+#if CODY_NETWORKING
+// Socket with specific address
+int OpenSocket (char const **, sockaddr const *sock, socklen_t len);
+int ListenSocket (char const **, sockaddr const *sock, socklen_t len,
+ unsigned backlog);
+
+// Local domain socket (e.g. AF_UNIX)
+int OpenLocal (char const **, char const *name);
+int ListenLocal (char const **, char const *name, unsigned backlog = 0);
+
+// IPv6 socket
+int OpenInet6 (char const **e, char const *name, int port);
+int ListenInet6 (char const **, char const *name, int port,
+ unsigned backlog = 0);
+#endif
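A sketch pairing the CODY_NETWORKING helpers with the Server class. It assumes ListenLocal returns a listening descriptor (negative on failure) and fills the char const * out-parameter with an error string, and that Server does not own or close the descriptor; those contracts live in netserver.cc, not in this hunk.

  // Sketch: accept clients on a local-domain socket (requires
  // CODY_NETWORKING).  ServeOne is the loop sketched after class Server.
  #include "cody.hh"
  #include <sys/socket.h>
  #include <unistd.h>

  int RunLocal (Cody::Resolver *resolver, char const *sockname)
  {
    char const *errmsg = nullptr;
    int listen_fd = Cody::ListenLocal (&errmsg, sockname);
    if (listen_fd < 0)
      return 1;                      // errmsg/errno describe the failure

    for (;;)
      {
        int client_fd = accept (listen_fd, nullptr, nullptr);
        if (client_fd < 0)
          break;
        Cody::Server server (resolver, client_fd);
        ServeOne (server);
        close (client_fd);           // assuming Server leaves the FD open
      }
    close (listen_fd);
    return 0;
  }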
+
+// FIXME: Mapping file utilities?
+
+}
+
+#endif // CODY_HH
diff --git a/libcody/config.h.in b/libcody/config.h.in
new file mode 100644
index 0000000..8542547
--- /dev/null
+++ b/libcody/config.h.in
@@ -0,0 +1,29 @@
+/* config.h.in. Generated from configure.ac by autoheader. */
+
+/* Bug reporting location */
+#undef BUGURL
+
+/* Enable checking */
+#undef NMS_CHECKING
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+#undef _FORTIFY_SOURCE
+
+#define _GNU_SOURCE 1
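The NMS_CHECKING define above gates run-time assertions elsewhere in the library (not in this hunk). A hedged sketch of the usual consumption pattern; EXAMPLE_ASSERT is a hypothetical name, not the library's own macro.

  // Sketch only: a checking guard keyed off config.h's NMS_CHECKING.
  #include "config.h"
  #include <cstdlib>

  #if NMS_CHECKING
  #define EXAMPLE_ASSERT(COND) ((COND) ? (void)0 : std::abort ())
  #else
  #define EXAMPLE_ASSERT(COND) ((void)0)
  #endif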
diff --git a/libcody/config.m4 b/libcody/config.m4
new file mode 100644
index 0000000..364195a
--- /dev/null
+++ b/libcody/config.m4
@@ -0,0 +1,280 @@
+# Nathan's Common Config -*- mode:autoconf -*-
+# Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+AC_DEFUN([NMS_NOT_IN_SOURCE],
+[if test -e configure ; then
+AC_MSG_ERROR([Do not build in the source tree. Reasons])
+fi])
+
+# thanks to Zack Weinberg for fixing this!
+AC_DEFUN([NMS_TOOLS],
+[AC_SUBST([tools], [])
+AC_ARG_WITH([tools],
+ AS_HELP_STRING([--with-tools=DIR],[tool directory]),
+ [AS_CASE([$withval],
+ [yes], [AC_MSG_ERROR([--with-tools requires an argument])],
+ [no], [:],
+ [tools="${withval%/bin}"])])
+
+if test -n "$tools" ; then
+ if test -d "$tools/bin"; then
+ PATH="$tools/bin:$PATH"
+ AC_MSG_NOTICE([Using tools in $tools])
+ else
+ AC_MSG_ERROR([tool location does not exist])
+ fi
+fi])
+
+AC_DEFUN([NMS_TOOL_DIRS],
+[if test "$tools" && test -d "$tools/include" ; then
+ CXX+=" -I$tools/include"
+fi
+if test "$tools" && test -d "$tools/lib" ; then
+ toollib="$tools/lib"
+  if os=$($CXX -print-multi-os-directory 2>/dev/null) ; then
+ toollib+="/${os}"
+ fi
+ LDFLAGS+=" -L $toollib"
+ unset toollib
+fi])
+
+AC_DEFUN([NMS_NUM_CPUS],
+[AC_MSG_CHECKING([number of CPUs])
+AS_CASE([$build],
+[*-*-darwin*], [NUM_CPUS=$(sysctl -n hw.ncpu 2>/dev/null)],
+[NUM_CPUS=$(grep -c '^processor' /proc/cpuinfo 2>/dev/null)])
+test "$NUM_CPUS" = 0 && NUM_CPUS=
+AC_MSG_RESULT([${NUM_CPUS:-unknown}])
+test "$NUM_CPUS" = 1 && NUM_CPUS=
+AC_SUBST(NUM_CPUS)])
+
+AC_DEFUN([NMS_MAINTAINER_MODE],
+[AC_ARG_ENABLE([maintainer-mode],
+AS_HELP_STRING([--enable-maintainer-mode],
+[enable maintainer mode. Add rules to rebuild configurey bits]),,
+[enable_maintainer_mode=no])
+AS_CASE([$enable_maintainer_mode],
+ [yes], [maintainer_mode=yes],
+  [no], [maintainer_mode=no],
+ [AC_MSG_ERROR([unknown maintainer mode $enable_maintainer_mode])])
+AC_MSG_CHECKING([maintainer-mode])
+AC_MSG_RESULT([$maintainer_mode])
+test "$maintainer_mode" = yes && MAINTAINER=yes
+AC_SUBST(MAINTAINER)])
+
+AC_DEFUN([NMS_CXX_COMPILER],
+[AC_ARG_WITH([compiler],
+AS_HELP_STRING([--with-compiler=NAME],[which compiler to use]),
+AC_MSG_CHECKING([C++ compiler])
+if test "$withval" = "yes" ; then
+ AC_MSG_ERROR([NAME not specified])
+elif test "$withval" = "no" ; then
+ AC_MSG_ERROR([Gonna need a C++ compiler!])
+else
+ CXX="${withval}"
+ AC_MSG_RESULT([$CXX])
+fi)])
+
+AC_DEFUN([NMS_CXX_11],
+[AC_MSG_CHECKING([whether $CXX is for C++11])
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([
+[#if __cplusplus != 201103
+#error "C++11 is required"
+#endif
+]])],
+[AC_MSG_RESULT([yes])],
+[CXX_ORIG="$CXX"
+CXX+=" -std=c++11"
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([
+[#if __cplusplus != 201103
+#error "C++11 is required"
+#endif
+]])],
+AC_MSG_RESULT([adding -std=c++11]),
+[CXX="$CXX_ORIG"
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([
+[#if __cplusplus > 201103
+#error "C++11 is required"
+#endif
+]])],
+AC_MSG_RESULT([> C++11]),
+AC_MSG_RESULT([no])
+AC_MSG_ERROR([C++11 is required])]))
+unset CXX_ORIG])])
+
+AC_DEFUN([NMS_CXX_20],
+[AC_MSG_CHECKING([whether $CXX is for C++20])
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([
+[#if __cplusplus <= 201703
+#error "C++20 is required"
+#endif
+]])],
+[AC_MSG_RESULT([yes])],
+[CXX+=" -std=c++20"
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([
+[#if __cplusplus <= 201703
+#error "C++20 is required"
+#endif
+]])],
+AC_MSG_RESULT([adding -std=c++20]),
+AC_MSG_RESULT([no])
+AC_MSG_ERROR([C++20 is required])]))
+
+AC_MSG_CHECKING([whether C++20 support is sufficiently advanced])
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <version>
+// There doesn't seem to be a feature macro for __VA_OPT__ :(
+#define VARIADIC(X,...) X __VA_OPT__((__VA_ARGS__))
+#define X(Y,Z) 1
+int ary[VARIADIC(X,Y,Z)];
+#if __cpp_constinit < 201907
+#error "C++20 constinit required"
+cpp_constinit is __cpp_constinit
+#endif
+#if __cpp_if_constexpr < 201606
+#error "C++20 constexpr required"
+cpp_constexpr is __cpp_if_constexpr
+#endif
+#if __cpp_concepts < 201907
+#error "C++20 concepts required"
+cpp_concepts is __cpp_concepts
+#endif
+#if __cpp_structured_bindings < 201606
+#error "C++20 structured bindings required"
+cpp_structured_bindings is __cpp_structured_bindings
+#endif
+#if __cpp_lib_int_pow2 < 202002
+#error "std::has_single_bit required"
+cpp_lib_int_pow2 is __cpp_lib_int_pow2
+#endif
+]])],
+AC_MSG_RESULT([yes 🙂]),
+AC_MSG_RESULT([no 🙁])
+AC_MSG_ERROR([C++20 support is too immature]))])
+
+AC_DEFUN([NMS_ENABLE_EXCEPTIONS],
+[AC_ARG_ENABLE([exceptions],
+AS_HELP_STRING([--enable-exceptions],
+[enable exceptions & rtti]),,
+[enable_exceptions="no"])
+AS_CASE([$enable_exceptions],
+ [yes], [nms_exceptions=yes],
+ [no], [nms_exceptions=no],
+ [AC_MSG_ERROR([unknown exceptions $enable_exceptions])])
+AC_MSG_CHECKING([exceptions])
+AC_MSG_RESULT([$nms_exceptions])
+if test "$nms_exceptions" != no ; then
+ EXCEPTIONS=yes
+fi
+AC_SUBST(EXCEPTIONS)])
+
+AC_DEFUN([NMS_LINK_OPT],
+[AC_MSG_CHECKING([adding $1 to linker])
+ORIG_LDFLAGS="$LDFLAGS"
+LDFLAGS+=" $1"
+AC_LINK_IFELSE([AC_LANG_PROGRAM([])],
+[AC_MSG_RESULT([ok])],
+[LDFLAGS="$ORIG_LDFLAGS"
+AC_MSG_RESULT([no])])
+unset ORIG_LDFLAGS])
+
+AC_DEFUN([NMS_BUGURL],
+[AC_MSG_CHECKING([bugurl])
+AC_ARG_WITH(bugurl,
+AS_HELP_STRING([--with-bugurl=URL],[where to report bugs]),
+AS_CASE(["$withval"],
+ [yes], [AC_MSG_ERROR([--with-bugurl requires an argument])],
+ [no], [BUGURL=""],
+ [BUGURL="${withval}"]),
+[BUGURL="${PACKAGE_BUGREPORT}"])
+AC_MSG_RESULT($BUGURL)
+AC_DEFINE_UNQUOTED(BUGURL,"$BUGURL",[Bug reporting location])])
+
+AC_DEFUN([NMS_DISTRIBUTION],
+[AC_ARG_ENABLE([distribution],
+AS_HELP_STRING([--enable-distribution],
+[enable distribution. Inhibit components that prevent distribution]),,
+[enable_distribution="no"])
+AS_CASE([$enable_distribution],
+ [yes], [nms_distribution=yes],
+ [no], [nms_distribution=no],
+ [AC_MSG_ERROR([unknown distribution $enable_distribution])])
+AC_MSG_CHECKING([distribution])
+AC_MSG_RESULT([$nms_distribution])])
+
+AC_DEFUN([NMS_ENABLE_CHECKING],
+[AC_ARG_ENABLE([checking],
+AS_HELP_STRING([--enable-checking],
+[enable run-time checking]),,
+[enable_checking="yes"])
+AS_CASE([$enable_checking],
+ [yes|all|yes,*], [nms_checking=yes],
+ [no|none|release], [nms_checking=],
+ [AC_MSG_ERROR([unknown check "$enable_checking"])])
+AC_MSG_CHECKING([checking])
+AC_MSG_RESULT([${nms_checking:-no}])
+if test "$nms_checking" = yes ; then
+ AC_DEFINE_UNQUOTED([NMS_CHECKING], [0${nms_checking:+1}], [Enable checking])
+fi])
+
+AC_DEFUN([NMS_WITH_BINUTILS],
+[AC_MSG_CHECKING([binutils])
+AC_ARG_WITH(bfd,
+AS_HELP_STRING([--with-bfd=DIR], [location of libbfd]),
+if test "$withval" = "yes" ; then
+ AC_MSG_ERROR([DIR not specified])
+elif test "$withval" = "no" ; then
+ AC_MSG_RESULT(installed)
+else
+ AC_MSG_RESULT(${withval})
+ CPPFLAGS+=" -I${withval}/include"
+ LDFLAGS+=" -L${withval}/lib"
+fi,
+AC_MSG_RESULT(installed))])
+
+AC_DEFUN([NMS_ENABLE_BACKTRACE],
+[AC_REQUIRE([NMS_DISTRIBUTION])
+AC_ARG_ENABLE([backtrace],
+AS_HELP_STRING([--enable-backtrace],[provide backtrace on fatality.]),,
+[enable_backtrace="maybe"])
+if test "${enable_backtrace:-maybe}" != no ; then
+ AC_CHECK_HEADERS(execinfo.h)
+ AC_CHECK_FUNCS(backtrace)
+ if test "$nms_distribution" = no ; then
+ AC_DEFINE([HAVE_DECL_BASENAME], [1], [Needed for demangle.h])
+ # libiberty prevents distribution because of licensing
+ AC_CHECK_HEADERS([demangle.h libiberty/demangle.h],[break])
+ # libbfd prevents distribution because of licensing
+ AC_CHECK_HEADERS([bfd.h])
+ AC_SEARCH_LIBS([bfd_openr],[bfd],[LIBS+="-lz -liberty -ldl"],,[-lz -liberty -ldl])
+ fi
+ if test "$ac_cv_func_backtrace" = yes ; then
+ nms_backtrace=yes
+ ldbacktrace=-rdynamic
+ AC_DEFINE([NMS_BACKTRACE], [1], [Enable backtrace])
+ elif test "$enable_backtrace" = yes ; then
+ AC_MSG_ERROR([Backtrace unavailable])
+ fi
+ AC_SUBST([ldbacktrace])
+fi
+AC_MSG_CHECKING([backtrace])
+AC_MSG_RESULT([${nms_backtrace:-no}])])
+
+AC_DEFUN([NMS_CONFIG_FILES],
+[CONFIG_FILES="Makefile $1"
+SUBDIRS="$2"
+for generated in config.h.in configure ; do
+ if test $srcdir/configure.ac -nt $srcdir/$generated ; then
+ touch $srcdir/$generated
+ fi
+done
+for dir in . $SUBDIRS
+do
+ CONFIG_FILES+=" $dir/Makesub"
+ test -f ${srcdir}/$dir/tests/Makesub.in && CONFIG_FILES+=" $dir/tests/Makesub"
+done
+AC_CONFIG_FILES([$CONFIG_FILES])
+AC_SUBST(configure_args,[$ac_configure_args])
+AC_SUBST(SUBDIRS)
+AC_SUBST(CONFIG_FILES)])
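The NMS_ENABLE_BACKTRACE macro above only probes for the facility via AC_CHECK_HEADERS/AC_CHECK_FUNCS. A hedged sketch of how the resulting HAVE_EXECINFO_H / HAVE_BACKTRACE defines might be consumed; DumpBacktrace is a hypothetical name, and the libiberty/bfd symbolization also probed above is omitted.

  // Sketch: emit a raw backtrace when configure found backtrace().
  #include "config.h"
  #ifdef HAVE_EXECINFO_H
  #include <execinfo.h>
  #endif
  #include <unistd.h>

  void DumpBacktrace ()
  {
  #ifdef HAVE_BACKTRACE
    void *frames[64];
    int depth = backtrace (frames, 64);
    backtrace_symbols_fd (frames, depth, STDERR_FILENO);
  #endif
  }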
diff --git a/libcody/configure b/libcody/configure
new file mode 100755
index 0000000..35fd4e9
--- /dev/null
+++ b/libcody/configure
@@ -0,0 +1,4201 @@
+#! /bin/sh
+# Guess values for system-dependent variables and create Makefiles.
+# Generated by GNU Autoconf 2.69 for codylib 0.0.
+#
+# Report bugs to <github.com/urnathan/libcody>.
+#
+#
+# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc.
+#
+#
+# This configure script is free software; the Free Software Foundation
+# gives unlimited permission to copy, distribute and modify it.
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+ emulate sh
+ NULLCMD=:
+ # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '${1+"$@"}'='"$@"'
+ setopt NO_GLOB_SUBST
+else
+ case `(set -o) 2>/dev/null` in #(
+ *posix*) :
+ set -o posix ;; #(
+ *) :
+ ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='print -r --'
+ as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='printf %s\n'
+ as_echo_n='printf %s'
+else
+ if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+ as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+ as_echo_n='/usr/ucb/echo -n'
+ else
+ as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+ as_echo_n_body='eval
+ arg=$1;
+ case $arg in #(
+ *"$as_nl"*)
+ expr "X$arg" : "X\\(.*\\)$as_nl";
+ arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+ esac;
+ expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+ '
+ export as_echo_n_body
+ as_echo_n='sh -c $as_echo_n_body as_echo'
+ fi
+ export as_echo_body
+ as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+ PATH_SEPARATOR=:
+ (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+ (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+ PATH_SEPARATOR=';'
+ }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order. Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" "" $as_nl"
+
+# Find who we are. Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+ *[\\/]* ) as_myself=$0 ;;
+ *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+ done
+IFS=$as_save_IFS
+
+ ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+ as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+ $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+ exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there. '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+# Use a proper internal environment variable to ensure we don't fall
+ # into an infinite loop, continuously re-executing ourselves.
+ if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then
+ _as_can_reexec=no; export _as_can_reexec;
+ # We cannot yet assume a decent shell, so we have to provide a
+# neutralization value for shells without unset; and this also
+# works around shells that cannot unset nonexistent variables.
+# Preserve -v and -x to the replacement shell.
+BASH_ENV=/dev/null
+ENV=/dev/null
+(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+case $- in # ((((
+ *v*x* | *x*v* ) as_opts=-vx ;;
+ *v* ) as_opts=-v ;;
+ *x* ) as_opts=-x ;;
+ * ) as_opts= ;;
+esac
+exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
+# Admittedly, this is quite paranoid, since all the known shells bail
+# out after a failed `exec'.
+$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
+as_fn_exit 255
+ fi
+ # We don't want this to propagate to other subprocesses.
+ { _as_can_reexec=; unset _as_can_reexec;}
+if test "x$CONFIG_SHELL" = x; then
+ as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+ emulate sh
+ NULLCMD=:
+ # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '\${1+\"\$@\"}'='\"\$@\"'
+ setopt NO_GLOB_SUBST
+else
+ case \`(set -o) 2>/dev/null\` in #(
+ *posix*) :
+ set -o posix ;; #(
+ *) :
+ ;;
+esac
+fi
+"
+ as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+ exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1
+test -x / || exit 1"
+ as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
+ as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
+ eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
+ test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1"
+ if (eval "$as_required") 2>/dev/null; then :
+ as_have_required=yes
+else
+ as_have_required=no
+fi
+ if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ as_found=:
+ case $as_dir in #(
+ /*)
+ for as_base in sh bash ksh sh5; do
+ # Try only shells that exist, to save several forks.
+ as_shell=$as_dir/$as_base
+ if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+ { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+ CONFIG_SHELL=$as_shell as_have_required=yes
+ if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
+ break 2
+fi
+fi
+ done;;
+ esac
+ as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+ { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+ CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+ if test "x$CONFIG_SHELL" != x; then :
+ export CONFIG_SHELL
+ # We cannot yet assume a decent shell, so we have to provide a
+# neutralization value for shells without unset; and this also
+# works around shells that cannot unset nonexistent variables.
+# Preserve -v and -x to the replacement shell.
+BASH_ENV=/dev/null
+ENV=/dev/null
+(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+case $- in # ((((
+ *v*x* | *x*v* ) as_opts=-vx ;;
+ *v* ) as_opts=-v ;;
+ *x* ) as_opts=-x ;;
+ * ) as_opts= ;;
+esac
+exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
+# Admittedly, this is quite paranoid, since all the known shells bail
+# out after a failed `exec'.
+$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
+exit 255
+fi
+
+ if test x$as_have_required = xno; then :
+ $as_echo "$0: This script requires a shell more modern than all"
+ $as_echo "$0: the shells that I found on your system."
+ if test x${ZSH_VERSION+set} = xset ; then
+ $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+ $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+ else
+ $as_echo "$0: Please tell bug-autoconf@gnu.org and
+$0: github.com/urnathan/libcody about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
+ fi
+ exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+ { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+ return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+ set +e
+ as_fn_set_status $1
+ exit $1
+} # as_fn_exit
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+ case $as_dir in #(
+ -*) as_dir=./$as_dir;;
+ esac
+ test -d "$as_dir" || eval $as_mkdir_p || {
+ as_dirs=
+ while :; do
+ case $as_dir in #(
+ *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+ *) as_qdir=$as_dir;;
+ esac
+ as_dirs="'$as_qdir' $as_dirs"
+ as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+ X"$as_dir" : 'X\(//\)[^/]' \| \
+ X"$as_dir" : 'X\(//\)$' \| \
+ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)[^/].*/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+ test -d "$as_dir" && break
+ done
+ test -z "$as_dirs" || eval "mkdir $as_dirs"
+ } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
+
+# as_fn_executable_p FILE
+# -----------------------
+# Test if FILE is an executable regular file.
+as_fn_executable_p ()
+{
+ test -f "$1" && test -x "$1"
+} # as_fn_executable_p
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+ eval 'as_fn_append ()
+ {
+ eval $1+=\$2
+ }'
+else
+ as_fn_append ()
+ {
+ eval $1=\$$1\$2
+ }
+fi # as_fn_append
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+ eval 'as_fn_arith ()
+ {
+ as_val=$(( $* ))
+ }'
+else
+ as_fn_arith ()
+ {
+ as_val=`expr "$@" || test $? -eq 1`
+ }
+fi # as_fn_arith
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+ as_status=$1; test $as_status -eq 0 && as_status=1
+ if test "$4"; then
+ as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+ $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+ fi
+ $as_echo "$as_me: error: $2" >&2
+ as_fn_exit $as_status
+} # as_fn_error
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+ test "X`expr 00001 : '.*\(...\)'`" = X001; then
+ as_expr=expr
+else
+ as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+ as_basename=basename
+else
+ as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+ as_dirname=dirname
+else
+ as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+ X"$0" : 'X\(//\)$' \| \
+ X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+ sed '/^.*\/\([^/][^/]*\)\/*$/{
+ s//\1/
+ q
+ }
+ /^X\/\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\/\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+
+ as_lineno_1=$LINENO as_lineno_1a=$LINENO
+ as_lineno_2=$LINENO as_lineno_2a=$LINENO
+ eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
+ test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
+ # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-)
+ sed -n '
+ p
+ /[$]LINENO/=
+ ' <$as_myself |
+ sed '
+ s/[$]LINENO.*/&-/
+ t lineno
+ b
+ :lineno
+ N
+ :loop
+ s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
+ t loop
+ s/-\n.*//
+ ' >$as_me.lineno &&
+ chmod +x "$as_me.lineno" ||
+ { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
+
+ # If we had to re-execute with $CONFIG_SHELL, we're ensured to have
+ # already done that, so ensure we don't try to do so again and fall
+ # in an infinite loop. This has already happened in practice.
+ _as_can_reexec=no; export _as_can_reexec
+ # Don't try to exec as it changes $[0], causing all sort of problems
+ # (the dirname of $[0] is not the place where we might find the
+ # original and so on. Autoconf is especially sensitive to this).
+ . "./$as_me.lineno"
+ # Exit status is that of the last command.
+ exit
+}
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+ case `echo 'xy\c'` in
+ *c*) ECHO_T=' ';; # ECHO_T is single tab character.
+ xy) ECHO_C='\c';;
+ *) echo `echo ksh88 bug on AIX 6.1` > /dev/null
+ ECHO_T=' ';;
+ esac;;
+*)
+ ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+ rm -f conf$$.dir/conf$$.file
+else
+ rm -f conf$$.dir
+ mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+ if ln -s conf$$.file conf$$ 2>/dev/null; then
+ as_ln_s='ln -s'
+ # ... but there are two gotchas:
+ # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+ # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+ # In both cases, we have to default to `cp -pR'.
+ ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+ as_ln_s='cp -pR'
+ elif ln conf$$.file conf$$ 2>/dev/null; then
+ as_ln_s=ln
+ else
+ as_ln_s='cp -pR'
+ fi
+else
+ as_ln_s='cp -pR'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+if mkdir -p . 2>/dev/null; then
+ as_mkdir_p='mkdir -p "$as_dir"'
+else
+ test -d ./-p && rmdir ./-p
+ as_mkdir_p=false
+fi
+
+as_test_x='test -x'
+as_executable_p=as_fn_executable_p
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+
+test -n "$DJDIR" || exec 7<&0 </dev/null
+exec 6>&1
+
+# Name of the host.
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
+# so uname gets run too.
+ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
+
+#
+# Initializations.
+#
+ac_default_prefix=/usr/local
+ac_clean_files=
+ac_config_libobj_dir=.
+LIBOBJS=
+cross_compiling=no
+subdirs=
+MFLAGS=
+MAKEFLAGS=
+
+# Identity of this package.
+PACKAGE_NAME='codylib'
+PACKAGE_TARNAME='codylib'
+PACKAGE_VERSION='0.0'
+PACKAGE_STRING='codylib 0.0'
+PACKAGE_BUGREPORT='github.com/urnathan/libcody'
+PACKAGE_URL=''
+
+ac_unique_file="cody.hh"
+ac_subst_vars='LTLIBOBJS
+LIBOBJS
+ALOY
+DOXYGEN
+AR
+EXCEPTIONS
+CONFIG_FILES
+SUBDIRS
+configure_args
+OBJEXT
+EXEEXT
+ac_ct_CXX
+CPPFLAGS
+LDFLAGS
+CXXFLAGS
+CXX
+MAINTAINER
+NUM_CPUS
+tools
+host_os
+host_vendor
+host_cpu
+host
+build_os
+build_vendor
+build_cpu
+build
+target_alias
+host_alias
+build_alias
+LIBS
+ECHO_T
+ECHO_N
+ECHO_C
+DEFS
+mandir
+localedir
+libdir
+psdir
+pdfdir
+dvidir
+htmldir
+infodir
+docdir
+oldincludedir
+includedir
+localstatedir
+sharedstatedir
+sysconfdir
+datadir
+datarootdir
+libexecdir
+sbindir
+bindir
+program_transform_name
+prefix
+exec_prefix
+PACKAGE_URL
+PACKAGE_BUGREPORT
+PACKAGE_STRING
+PACKAGE_VERSION
+PACKAGE_TARNAME
+PACKAGE_NAME
+PATH_SEPARATOR
+SHELL'
+ac_subst_files=''
+ac_user_opts='
+enable_option_checking
+with_tools
+enable_maintainer_mode
+with_compiler
+with_bugurl
+enable_checking
+enable_exceptions
+'
+ ac_precious_vars='build_alias
+host_alias
+target_alias
+CXX
+CXXFLAGS
+LDFLAGS
+LIBS
+CPPFLAGS
+CCC'
+
+
+# Initialize some variables set by options.
+ac_init_help=
+ac_init_version=false
+ac_unrecognized_opts=
+ac_unrecognized_sep=
+# The variables have the same names as the options, with
+# dashes changed to underlines.
+cache_file=/dev/null
+exec_prefix=NONE
+no_create=
+no_recursion=
+prefix=NONE
+program_prefix=NONE
+program_suffix=NONE
+program_transform_name=s,x,x,
+silent=
+site=
+srcdir=
+verbose=
+x_includes=NONE
+x_libraries=NONE
+
+# Installation directory options.
+# These are left unexpanded so users can "make install exec_prefix=/foo"
+# and all the variables that are supposed to be based on exec_prefix
+# by default will actually change.
+# Use braces instead of parens because sh, perl, etc. also accept them.
+# (The list follows the same order as the GNU Coding Standards.)
+bindir='${exec_prefix}/bin'
+sbindir='${exec_prefix}/sbin'
+libexecdir='${exec_prefix}/libexec'
+datarootdir='${prefix}/share'
+datadir='${datarootdir}'
+sysconfdir='${prefix}/etc'
+sharedstatedir='${prefix}/com'
+localstatedir='${prefix}/var'
+includedir='${prefix}/include'
+oldincludedir='/usr/include'
+docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
+infodir='${datarootdir}/info'
+htmldir='${docdir}'
+dvidir='${docdir}'
+pdfdir='${docdir}'
+psdir='${docdir}'
+libdir='${exec_prefix}/lib'
+localedir='${datarootdir}/locale'
+mandir='${datarootdir}/man'
+
+ac_prev=
+ac_dashdash=
+for ac_option
+do
+ # If the previous option needs an argument, assign it.
+ if test -n "$ac_prev"; then
+ eval $ac_prev=\$ac_option
+ ac_prev=
+ continue
+ fi
+
+ case $ac_option in
+ *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
+ *=) ac_optarg= ;;
+ *) ac_optarg=yes ;;
+ esac
+
+ # Accept the important Cygnus configure options, so we can diagnose typos.
+
+ case $ac_dashdash$ac_option in
+ --)
+ ac_dashdash=yes ;;
+
+ -bindir | --bindir | --bindi | --bind | --bin | --bi)
+ ac_prev=bindir ;;
+ -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
+ bindir=$ac_optarg ;;
+
+ -build | --build | --buil | --bui | --bu)
+ ac_prev=build_alias ;;
+ -build=* | --build=* | --buil=* | --bui=* | --bu=*)
+ build_alias=$ac_optarg ;;
+
+ -cache-file | --cache-file | --cache-fil | --cache-fi \
+ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
+ ac_prev=cache_file ;;
+ -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
+ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
+ cache_file=$ac_optarg ;;
+
+ --config-cache | -C)
+ cache_file=config.cache ;;
+
+ -datadir | --datadir | --datadi | --datad)
+ ac_prev=datadir ;;
+ -datadir=* | --datadir=* | --datadi=* | --datad=*)
+ datadir=$ac_optarg ;;
+
+ -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
+ | --dataroo | --dataro | --datar)
+ ac_prev=datarootdir ;;
+ -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
+ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
+ datarootdir=$ac_optarg ;;
+
+ -disable-* | --disable-*)
+ ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
+ # Reject names that are not valid shell variable names.
+ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+ as_fn_error $? "invalid feature name: $ac_useropt"
+ ac_useropt_orig=$ac_useropt
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+ case $ac_user_opts in
+ *"
+"enable_$ac_useropt"
+"*) ;;
+ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
+ ac_unrecognized_sep=', ';;
+ esac
+ eval enable_$ac_useropt=no ;;
+
+ -docdir | --docdir | --docdi | --doc | --do)
+ ac_prev=docdir ;;
+ -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
+ docdir=$ac_optarg ;;
+
+ -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
+ ac_prev=dvidir ;;
+ -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
+ dvidir=$ac_optarg ;;
+
+ -enable-* | --enable-*)
+ ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
+ # Reject names that are not valid shell variable names.
+ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+ as_fn_error $? "invalid feature name: $ac_useropt"
+ ac_useropt_orig=$ac_useropt
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+ case $ac_user_opts in
+ *"
+"enable_$ac_useropt"
+"*) ;;
+ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
+ ac_unrecognized_sep=', ';;
+ esac
+ eval enable_$ac_useropt=\$ac_optarg ;;
+
+ -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
+ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
+ | --exec | --exe | --ex)
+ ac_prev=exec_prefix ;;
+ -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
+ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
+ | --exec=* | --exe=* | --ex=*)
+ exec_prefix=$ac_optarg ;;
+
+ -gas | --gas | --ga | --g)
+ # Obsolete; use --with-gas.
+ with_gas=yes ;;
+
+ -help | --help | --hel | --he | -h)
+ ac_init_help=long ;;
+ -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
+ ac_init_help=recursive ;;
+ -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
+ ac_init_help=short ;;
+
+ -host | --host | --hos | --ho)
+ ac_prev=host_alias ;;
+ -host=* | --host=* | --hos=* | --ho=*)
+ host_alias=$ac_optarg ;;
+
+ -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
+ ac_prev=htmldir ;;
+ -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
+ | --ht=*)
+ htmldir=$ac_optarg ;;
+
+ -includedir | --includedir | --includedi | --included | --include \
+ | --includ | --inclu | --incl | --inc)
+ ac_prev=includedir ;;
+ -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
+ | --includ=* | --inclu=* | --incl=* | --inc=*)
+ includedir=$ac_optarg ;;
+
+ -infodir | --infodir | --infodi | --infod | --info | --inf)
+ ac_prev=infodir ;;
+ -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
+ infodir=$ac_optarg ;;
+
+ -libdir | --libdir | --libdi | --libd)
+ ac_prev=libdir ;;
+ -libdir=* | --libdir=* | --libdi=* | --libd=*)
+ libdir=$ac_optarg ;;
+
+ -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
+ | --libexe | --libex | --libe)
+ ac_prev=libexecdir ;;
+ -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
+ | --libexe=* | --libex=* | --libe=*)
+ libexecdir=$ac_optarg ;;
+
+ -localedir | --localedir | --localedi | --localed | --locale)
+ ac_prev=localedir ;;
+ -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
+ localedir=$ac_optarg ;;
+
+ -localstatedir | --localstatedir | --localstatedi | --localstated \
+ | --localstate | --localstat | --localsta | --localst | --locals)
+ ac_prev=localstatedir ;;
+ -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
+ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
+ localstatedir=$ac_optarg ;;
+
+ -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
+ ac_prev=mandir ;;
+ -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
+ mandir=$ac_optarg ;;
+
+ -nfp | --nfp | --nf)
+ # Obsolete; use --without-fp.
+ with_fp=no ;;
+
+ -no-create | --no-create | --no-creat | --no-crea | --no-cre \
+ | --no-cr | --no-c | -n)
+ no_create=yes ;;
+
+ -no-recursion | --no-recursion | --no-recursio | --no-recursi \
+ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
+ no_recursion=yes ;;
+
+ -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
+ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
+ | --oldin | --oldi | --old | --ol | --o)
+ ac_prev=oldincludedir ;;
+ -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
+ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
+ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
+ oldincludedir=$ac_optarg ;;
+
+ -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
+ ac_prev=prefix ;;
+ -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
+ prefix=$ac_optarg ;;
+
+ -program-prefix | --program-prefix | --program-prefi | --program-pref \
+ | --program-pre | --program-pr | --program-p)
+ ac_prev=program_prefix ;;
+ -program-prefix=* | --program-prefix=* | --program-prefi=* \
+ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
+ program_prefix=$ac_optarg ;;
+
+ -program-suffix | --program-suffix | --program-suffi | --program-suff \
+ | --program-suf | --program-su | --program-s)
+ ac_prev=program_suffix ;;
+ -program-suffix=* | --program-suffix=* | --program-suffi=* \
+ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
+ program_suffix=$ac_optarg ;;
+
+ -program-transform-name | --program-transform-name \
+ | --program-transform-nam | --program-transform-na \
+ | --program-transform-n | --program-transform- \
+ | --program-transform | --program-transfor \
+ | --program-transfo | --program-transf \
+ | --program-trans | --program-tran \
+ | --progr-tra | --program-tr | --program-t)
+ ac_prev=program_transform_name ;;
+ -program-transform-name=* | --program-transform-name=* \
+ | --program-transform-nam=* | --program-transform-na=* \
+ | --program-transform-n=* | --program-transform-=* \
+ | --program-transform=* | --program-transfor=* \
+ | --program-transfo=* | --program-transf=* \
+ | --program-trans=* | --program-tran=* \
+ | --progr-tra=* | --program-tr=* | --program-t=*)
+ program_transform_name=$ac_optarg ;;
+
+ -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
+ ac_prev=pdfdir ;;
+ -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
+ pdfdir=$ac_optarg ;;
+
+ -psdir | --psdir | --psdi | --psd | --ps)
+ ac_prev=psdir ;;
+ -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
+ psdir=$ac_optarg ;;
+
+ -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+ | -silent | --silent | --silen | --sile | --sil)
+ silent=yes ;;
+
+ -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
+ ac_prev=sbindir ;;
+ -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
+ | --sbi=* | --sb=*)
+ sbindir=$ac_optarg ;;
+
+ -sharedstatedir | --sharedstatedir | --sharedstatedi \
+ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
+ | --sharedst | --shareds | --shared | --share | --shar \
+ | --sha | --sh)
+ ac_prev=sharedstatedir ;;
+ -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
+ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
+ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
+ | --sha=* | --sh=*)
+ sharedstatedir=$ac_optarg ;;
+
+ -site | --site | --sit)
+ ac_prev=site ;;
+ -site=* | --site=* | --sit=*)
+ site=$ac_optarg ;;
+
+ -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
+ ac_prev=srcdir ;;
+ -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
+ srcdir=$ac_optarg ;;
+
+ -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
+ | --syscon | --sysco | --sysc | --sys | --sy)
+ ac_prev=sysconfdir ;;
+ -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
+ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
+ sysconfdir=$ac_optarg ;;
+
+ -target | --target | --targe | --targ | --tar | --ta | --t)
+ ac_prev=target_alias ;;
+ -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
+ target_alias=$ac_optarg ;;
+
+ -v | -verbose | --verbose | --verbos | --verbo | --verb)
+ verbose=yes ;;
+
+ -version | --version | --versio | --versi | --vers | -V)
+ ac_init_version=: ;;
+
+ -with-* | --with-*)
+ ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
+ # Reject names that are not valid shell variable names.
+ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+ as_fn_error $? "invalid package name: $ac_useropt"
+ ac_useropt_orig=$ac_useropt
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+ case $ac_user_opts in
+ *"
+"with_$ac_useropt"
+"*) ;;
+ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
+ ac_unrecognized_sep=', ';;
+ esac
+ eval with_$ac_useropt=\$ac_optarg ;;
+
+ -without-* | --without-*)
+ ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
+ # Reject names that are not valid shell variable names.
+ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+ as_fn_error $? "invalid package name: $ac_useropt"
+ ac_useropt_orig=$ac_useropt
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+ case $ac_user_opts in
+ *"
+"with_$ac_useropt"
+"*) ;;
+ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
+ ac_unrecognized_sep=', ';;
+ esac
+ eval with_$ac_useropt=no ;;
+
+ --x)
+ # Obsolete; use --with-x.
+ with_x=yes ;;
+
+ -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
+ | --x-incl | --x-inc | --x-in | --x-i)
+ ac_prev=x_includes ;;
+ -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
+ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
+ x_includes=$ac_optarg ;;
+
+ -x-libraries | --x-libraries | --x-librarie | --x-librari \
+ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
+ ac_prev=x_libraries ;;
+ -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
+ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
+ x_libraries=$ac_optarg ;;
+
+ -*) as_fn_error $? "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information"
+ ;;
+
+ *=*)
+ ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
+ # Reject names that are not valid shell variable names.
+ case $ac_envvar in #(
+ '' | [0-9]* | *[!_$as_cr_alnum]* )
+ as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
+ esac
+ eval $ac_envvar=\$ac_optarg
+ export $ac_envvar ;;
+
+ *)
+ # FIXME: should be removed in autoconf 3.0.
+ $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
+ expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
+ $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
+ : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
+ ;;
+
+ esac
+done
+
+if test -n "$ac_prev"; then
+ ac_option=--`echo $ac_prev | sed 's/_/-/g'`
+ as_fn_error $? "missing argument to $ac_option"
+fi
+
+if test -n "$ac_unrecognized_opts"; then
+ case $enable_option_checking in
+ no) ;;
+ fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
+ *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
+ esac
+fi
+
+# Check all directory arguments for consistency.
+for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \
+ datadir sysconfdir sharedstatedir localstatedir includedir \
+ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
+ libdir localedir mandir
+do
+ eval ac_val=\$$ac_var
+ # Remove trailing slashes.
+ case $ac_val in
+ */ )
+ ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
+ eval $ac_var=\$ac_val;;
+ esac
+ # Be sure to have absolute directory names.
+ case $ac_val in
+ [\\/$]* | ?:[\\/]* ) continue;;
+ NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
+ esac
+ as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
+done
+
+# There might be people who depend on the old broken behavior: `$host'
+# used to hold the argument of --host etc.
+# FIXME: To remove some day.
+build=$build_alias
+host=$host_alias
+target=$target_alias
+
+# FIXME: To remove some day.
+if test "x$host_alias" != x; then
+ if test "x$build_alias" = x; then
+ cross_compiling=maybe
+ elif test "x$build_alias" != "x$host_alias"; then
+ cross_compiling=yes
+ fi
+fi
+
+ac_tool_prefix=
+test -n "$host_alias" && ac_tool_prefix=$host_alias-
+
+test "$silent" = yes && exec 6>/dev/null
+
+
+ac_pwd=`pwd` && test -n "$ac_pwd" &&
+ac_ls_di=`ls -di .` &&
+ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
+ as_fn_error $? "working directory cannot be determined"
+test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
+ as_fn_error $? "pwd does not report name of working directory"
+
+
+# Find the source files, if location was not specified.
+if test -z "$srcdir"; then
+ ac_srcdir_defaulted=yes
+ # Try the directory containing this script, then the parent directory.
+ ac_confdir=`$as_dirname -- "$as_myself" ||
+$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+ X"$as_myself" : 'X\(//\)[^/]' \| \
+ X"$as_myself" : 'X\(//\)$' \| \
+ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_myself" |
+ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)[^/].*/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+ srcdir=$ac_confdir
+ if test ! -r "$srcdir/$ac_unique_file"; then
+ srcdir=..
+ fi
+else
+ ac_srcdir_defaulted=no
+fi
+if test ! -r "$srcdir/$ac_unique_file"; then
+ test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
+ as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
+fi
+ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
+ac_abs_confdir=`(
+ cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
+ pwd)`
+# When building in place, set srcdir=.
+if test "$ac_abs_confdir" = "$ac_pwd"; then
+ srcdir=.
+fi
+# Remove unnecessary trailing slashes from srcdir.
+# Double slashes in file names in object file debugging info
+# mess up M-x gdb in Emacs.
+case $srcdir in
+*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
+esac
+for ac_var in $ac_precious_vars; do
+ eval ac_env_${ac_var}_set=\${${ac_var}+set}
+ eval ac_env_${ac_var}_value=\$${ac_var}
+ eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
+ eval ac_cv_env_${ac_var}_value=\$${ac_var}
+done
+
+#
+# Report the --help message.
+#
+if test "$ac_init_help" = "long"; then
+ # Omit some internal or obsolete options to make the list less imposing.
+ # This message is too long to be a string in the A/UX 3.1 sh.
+ cat <<_ACEOF
+\`configure' configures codylib 0.0 to adapt to many kinds of systems.
+
+Usage: $0 [OPTION]... [VAR=VALUE]...
+
+To assign environment variables (e.g., CC, CFLAGS...), specify them as
+VAR=VALUE. See below for descriptions of some of the useful variables.
+
+Defaults for the options are specified in brackets.
+
+Configuration:
+ -h, --help display this help and exit
+ --help=short display options specific to this package
+ --help=recursive display the short help of all the included packages
+ -V, --version display version information and exit
+ -q, --quiet, --silent do not print \`checking ...' messages
+ --cache-file=FILE cache test results in FILE [disabled]
+ -C, --config-cache alias for \`--cache-file=config.cache'
+ -n, --no-create do not create output files
+ --srcdir=DIR find the sources in DIR [configure dir or \`..']
+
+Installation directories:
+ --prefix=PREFIX install architecture-independent files in PREFIX
+ [$ac_default_prefix]
+ --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX
+ [PREFIX]
+
+By default, \`make install' will install all the files in
+\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify
+an installation prefix other than \`$ac_default_prefix' using \`--prefix',
+for instance \`--prefix=\$HOME'.
+
+For better control, use the options below.
+
+Fine tuning of the installation directories:
+ --bindir=DIR user executables [EPREFIX/bin]
+ --sbindir=DIR system admin executables [EPREFIX/sbin]
+ --libexecdir=DIR program executables [EPREFIX/libexec]
+ --sysconfdir=DIR read-only single-machine data [PREFIX/etc]
+ --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com]
+ --localstatedir=DIR modifiable single-machine data [PREFIX/var]
+ --libdir=DIR object code libraries [EPREFIX/lib]
+ --includedir=DIR C header files [PREFIX/include]
+ --oldincludedir=DIR C header files for non-gcc [/usr/include]
+ --datarootdir=DIR read-only arch.-independent data root [PREFIX/share]
+ --datadir=DIR read-only architecture-independent data [DATAROOTDIR]
+ --infodir=DIR info documentation [DATAROOTDIR/info]
+ --localedir=DIR locale-dependent data [DATAROOTDIR/locale]
+ --mandir=DIR man documentation [DATAROOTDIR/man]
+ --docdir=DIR documentation root [DATAROOTDIR/doc/codylib]
+ --htmldir=DIR html documentation [DOCDIR]
+ --dvidir=DIR dvi documentation [DOCDIR]
+ --pdfdir=DIR pdf documentation [DOCDIR]
+ --psdir=DIR ps documentation [DOCDIR]
+_ACEOF
+
+ cat <<\_ACEOF
+
+System types:
+ --build=BUILD configure for building on BUILD [guessed]
+ --host=HOST cross-compile to build programs to run on HOST [BUILD]
+_ACEOF
+fi
+
+if test -n "$ac_init_help"; then
+ case $ac_init_help in
+ short | recursive ) echo "Configuration of codylib 0.0:";;
+ esac
+ cat <<\_ACEOF
+
+Optional Features:
+ --disable-option-checking ignore unrecognized --enable/--with options
+ --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no)
+ --enable-FEATURE[=ARG] include FEATURE [ARG=yes]
+ --enable-maintainer-mode
+ enable maintainer mode. Add rules to rebuild
+ configurey bits
+ --enable-checking enable run-time checking
+ --enable-exceptions enable exceptions & rtti
+
+Optional Packages:
+ --with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
+ --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no)
+ --with-tools=DIR tool directory
+ --with-compiler=NAME which compiler to use
+ --with-bugurl=URL where to report bugs
+
+Some influential environment variables:
+ CXX C++ compiler command
+ CXXFLAGS C++ compiler flags
+ LDFLAGS linker flags, e.g. -L<lib dir> if you have libraries in a
+ nonstandard directory <lib dir>
+ LIBS libraries to pass to the linker, e.g. -l<library>
+ CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
+ you have headers in a nonstandard directory <include dir>
+
+Use these variables to override the choices made by `configure' or to help
+it to find libraries and programs with nonstandard names/locations.
+
+Report bugs to <github.com/urnathan/libcody>.
+_ACEOF
+ac_status=$?
+fi
+
+if test "$ac_init_help" = "recursive"; then
+ # If there are subdirs, report their specific --help.
+ for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
+ test -d "$ac_dir" ||
+ { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
+ continue
+ ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+ ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+ # A ".." for each directory in $ac_dir_suffix.
+ ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+ case $ac_top_builddir_sub in
+ "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+ *) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+ esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+ .) # We are building in place.
+ ac_srcdir=.
+ ac_top_srcdir=$ac_top_builddir_sub
+ ac_abs_top_srcdir=$ac_pwd ;;
+ [\\/]* | ?:[\\/]* ) # Absolute name.
+ ac_srcdir=$srcdir$ac_dir_suffix;
+ ac_top_srcdir=$srcdir
+ ac_abs_top_srcdir=$srcdir ;;
+ *) # Relative name.
+ ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+ ac_top_srcdir=$ac_top_build_prefix$srcdir
+ ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+ cd "$ac_dir" || { ac_status=$?; continue; }
+ # Check for guested configure.
+ if test -f "$ac_srcdir/configure.gnu"; then
+ echo &&
+ $SHELL "$ac_srcdir/configure.gnu" --help=recursive
+ elif test -f "$ac_srcdir/configure"; then
+ echo &&
+ $SHELL "$ac_srcdir/configure" --help=recursive
+ else
+ $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
+ fi || ac_status=$?
+ cd "$ac_pwd" || { ac_status=$?; break; }
+ done
+fi
+
+test -n "$ac_init_help" && exit $ac_status
+if $ac_init_version; then
+ cat <<\_ACEOF
+codylib configure 0.0
+generated by GNU Autoconf 2.69
+
+Copyright (C) 2012 Free Software Foundation, Inc.
+This configure script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it.
+_ACEOF
+ exit
+fi
+
+## ------------------------ ##
+## Autoconf initialization. ##
+## ------------------------ ##
+
+# ac_fn_cxx_try_compile LINENO
+# ----------------------------
+# Try to compile conftest.$ac_ext, and return whether this succeeded.
+ac_fn_cxx_try_compile ()
+{
+ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+ rm -f conftest.$ac_objext
+ if { { ac_try="$ac_compile"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_compile") 2>conftest.err
+ ac_status=$?
+ if test -s conftest.err; then
+ grep -v '^ *+' conftest.err >conftest.er1
+ cat conftest.er1 >&5
+ mv -f conftest.er1 conftest.err
+ fi
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; } && {
+ test -z "$ac_cxx_werror_flag" ||
+ test ! -s conftest.err
+ } && test -s conftest.$ac_objext; then :
+ ac_retval=0
+else
+ $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+ ac_retval=1
+fi
+ eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+ as_fn_set_status $ac_retval
+
+} # ac_fn_cxx_try_compile
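+
+# Callers pass their own $LINENO so that failures recorded in config.log
+# point back at the check that ran.  As an illustrative sketch of the
+# pattern the checks below use (result variable name is illustrative):
+#
+#   if ac_fn_cxx_try_compile "$LINENO"; then :
+#     check_result=yes
+#   fi
+#   rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext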
+
+# ac_fn_cxx_try_link LINENO
+# -------------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded.
+ac_fn_cxx_try_link ()
+{
+ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+ rm -f conftest.$ac_objext conftest$ac_exeext
+ if { { ac_try="$ac_link"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_link") 2>conftest.err
+ ac_status=$?
+ if test -s conftest.err; then
+ grep -v '^ *+' conftest.err >conftest.er1
+ cat conftest.er1 >&5
+ mv -f conftest.er1 conftest.err
+ fi
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; } && {
+ test -z "$ac_cxx_werror_flag" ||
+ test ! -s conftest.err
+ } && test -s conftest$ac_exeext && {
+ test "$cross_compiling" = yes ||
+ test -x conftest$ac_exeext
+ }; then :
+ ac_retval=0
+else
+ $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+ ac_retval=1
+fi
+ # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
+ # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
+ # interfere with the next link command; also delete a directory that is
+ # left behind by Apple's compiler. We do this before executing the actions.
+ rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+ eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+ as_fn_set_status $ac_retval
+
+} # ac_fn_cxx_try_link
+cat >config.log <<_ACEOF
+This file contains any messages produced by compilers while
+running configure, to aid debugging if configure makes a mistake.
+
+It was created by codylib $as_me 0.0, which was
+generated by GNU Autoconf 2.69. Invocation command line was
+
+ $ $0 $@
+
+_ACEOF
+exec 5>>config.log
+{
+cat <<_ASUNAME
+## --------- ##
+## Platform. ##
+## --------- ##
+
+hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
+/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown`
+
+/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown`
+/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
+/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown`
+/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown`
+/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown`
+/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown`
+
+_ASUNAME
+
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ $as_echo "PATH: $as_dir"
+ done
+IFS=$as_save_IFS
+
+} >&5
+
+cat >&5 <<_ACEOF
+
+
+## ----------- ##
+## Core tests. ##
+## ----------- ##
+
+_ACEOF
+
+
+# Keep a trace of the command line.
+# Strip out --no-create and --no-recursion so they do not pile up.
+# Strip out --silent because we don't want to record it for future runs.
+# Also quote any args containing shell meta-characters.
+# Make two passes to allow for proper duplicate-argument suppression.
+ac_configure_args=
+ac_configure_args0=
+ac_configure_args1=
+ac_must_keep_next=false
+for ac_pass in 1 2
+do
+ for ac_arg
+ do
+ case $ac_arg in
+ -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
+ -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+ | -silent | --silent | --silen | --sile | --sil)
+ continue ;;
+ *\'*)
+ ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
+ esac
+ case $ac_pass in
+ 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
+ 2)
+ as_fn_append ac_configure_args1 " '$ac_arg'"
+ if test $ac_must_keep_next = true; then
+ ac_must_keep_next=false # Got value, back to normal.
+ else
+ case $ac_arg in
+ *=* | --config-cache | -C | -disable-* | --disable-* \
+ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
+ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
+ | -with-* | --with-* | -without-* | --without-* | --x)
+ case "$ac_configure_args0 " in
+ "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
+ esac
+ ;;
+ -* ) ac_must_keep_next=true ;;
+ esac
+ fi
+ as_fn_append ac_configure_args " '$ac_arg'"
+ ;;
+ esac
+ done
+done
+{ ac_configure_args0=; unset ac_configure_args0;}
+{ ac_configure_args1=; unset ac_configure_args1;}
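+
+# For example, a run such as `./configure --prefix=/opt/cody --silent'
+# (an illustrative invocation) is recorded as " '--prefix=/opt/cody'":
+# the --silent is stripped and each kept argument is wrapped in single
+# quotes so that config.status can replay the command line verbatim.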
+
+# When interrupted or exit'd, cleanup temporary files, and complete
+# config.log. We remove comments because anyway the quotes in there
+# would cause problems or look ugly.
+# WARNING: Use '\'' to represent an apostrophe within the trap.
+# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
+trap 'exit_status=$?
+ # Save into config.log some information that might help in debugging.
+ {
+ echo
+
+ $as_echo "## ---------------- ##
+## Cache variables. ##
+## ---------------- ##"
+ echo
+ # The following way of writing the cache mishandles newlines in values,
+(
+ for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
+ eval ac_val=\$$ac_var
+ case $ac_val in #(
+ *${as_nl}*)
+ case $ac_var in #(
+ *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+ esac
+ case $ac_var in #(
+ _ | IFS | as_nl) ;; #(
+ BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+ *) { eval $ac_var=; unset $ac_var;} ;;
+ esac ;;
+ esac
+ done
+ (set) 2>&1 |
+ case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
+ *${as_nl}ac_space=\ *)
+ sed -n \
+ "s/'\''/'\''\\\\'\'''\''/g;
+ s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
+ ;; #(
+ *)
+ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+ ;;
+ esac |
+ sort
+)
+ echo
+
+ $as_echo "## ----------------- ##
+## Output variables. ##
+## ----------------- ##"
+ echo
+ for ac_var in $ac_subst_vars
+ do
+ eval ac_val=\$$ac_var
+ case $ac_val in
+ *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+ esac
+ $as_echo "$ac_var='\''$ac_val'\''"
+ done | sort
+ echo
+
+ if test -n "$ac_subst_files"; then
+ $as_echo "## ------------------- ##
+## File substitutions. ##
+## ------------------- ##"
+ echo
+ for ac_var in $ac_subst_files
+ do
+ eval ac_val=\$$ac_var
+ case $ac_val in
+ *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+ esac
+ $as_echo "$ac_var='\''$ac_val'\''"
+ done | sort
+ echo
+ fi
+
+ if test -s confdefs.h; then
+ $as_echo "## ----------- ##
+## confdefs.h. ##
+## ----------- ##"
+ echo
+ cat confdefs.h
+ echo
+ fi
+ test "$ac_signal" != 0 &&
+ $as_echo "$as_me: caught signal $ac_signal"
+ $as_echo "$as_me: exit $exit_status"
+ } >&5
+ rm -f core *.core core.conftest.* &&
+ rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
+ exit $exit_status
+' 0
+for ac_signal in 1 2 13 15; do
+ trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
+done
+ac_signal=0
+
+# confdefs.h avoids OS command line length limits that DEFS can exceed.
+rm -f -r conftest* confdefs.h
+
+$as_echo "/* confdefs.h */" > confdefs.h
+
+# Predefined preprocessor variables.
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_NAME "$PACKAGE_NAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_VERSION "$PACKAGE_VERSION"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_STRING "$PACKAGE_STRING"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_URL "$PACKAGE_URL"
+_ACEOF
+
+
+# Let the site file select an alternate cache file if it wants to.
+# Prefer an explicitly selected file to automatically selected ones.
+ac_site_file1=NONE
+ac_site_file2=NONE
+if test -n "$CONFIG_SITE"; then
+ # We do not want a PATH search for config.site.
+ case $CONFIG_SITE in #((
+ -*) ac_site_file1=./$CONFIG_SITE;;
+ */*) ac_site_file1=$CONFIG_SITE;;
+ *) ac_site_file1=./$CONFIG_SITE;;
+ esac
+elif test "x$prefix" != xNONE; then
+ ac_site_file1=$prefix/share/config.site
+ ac_site_file2=$prefix/etc/config.site
+else
+ ac_site_file1=$ac_default_prefix/share/config.site
+ ac_site_file2=$ac_default_prefix/etc/config.site
+fi
+for ac_site_file in "$ac_site_file1" "$ac_site_file2"
+do
+ test "x$ac_site_file" = xNONE && continue
+ if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
+$as_echo "$as_me: loading site script $ac_site_file" >&6;}
+ sed 's/^/| /' "$ac_site_file" >&5
+ . "$ac_site_file" \
+ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "failed to load site script $ac_site_file
+See \`config.log' for more details" "$LINENO" 5; }
+ fi
+done
+
+if test -r "$cache_file"; then
+ # Some versions of bash will fail to source /dev/null (special files
+ # actually), so we avoid doing that. DJGPP emulates it as a regular file.
+ if test /dev/null != "$cache_file" && test -f "$cache_file"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
+$as_echo "$as_me: loading cache $cache_file" >&6;}
+ case $cache_file in
+ [\\/]* | ?:[\\/]* ) . "$cache_file";;
+ *) . "./$cache_file";;
+ esac
+ fi
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
+$as_echo "$as_me: creating cache $cache_file" >&6;}
+ >$cache_file
+fi
+
+# Check that the precious variables saved in the cache have kept the same
+# value.
+ac_cache_corrupted=false
+for ac_var in $ac_precious_vars; do
+ eval ac_old_set=\$ac_cv_env_${ac_var}_set
+ eval ac_new_set=\$ac_env_${ac_var}_set
+ eval ac_old_val=\$ac_cv_env_${ac_var}_value
+ eval ac_new_val=\$ac_env_${ac_var}_value
+ case $ac_old_set,$ac_new_set in
+ set,)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
+ ac_cache_corrupted=: ;;
+ ,set)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
+ ac_cache_corrupted=: ;;
+ ,);;
+ *)
+ if test "x$ac_old_val" != "x$ac_new_val"; then
+ # differences in whitespace do not lead to failure.
+ ac_old_val_w=`echo x $ac_old_val`
+ ac_new_val_w=`echo x $ac_new_val`
+ if test "$ac_old_val_w" != "$ac_new_val_w"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
+$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
+ ac_cache_corrupted=:
+ else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
+$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
+ eval $ac_var=\$ac_old_val
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5
+$as_echo "$as_me: former value: \`$ac_old_val'" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5
+$as_echo "$as_me: current value: \`$ac_new_val'" >&2;}
+ fi;;
+ esac
+ # Pass precious variables to config.status.
+ if test "$ac_new_set" = set; then
+ case $ac_new_val in
+ *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
+ *) ac_arg=$ac_var=$ac_new_val ;;
+ esac
+ case " $ac_configure_args " in
+ *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy.
+ *) as_fn_append ac_configure_args " '$ac_arg'" ;;
+ esac
+ fi
+done
+if $ac_cache_corrupted; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
+$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
+ as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
+fi
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+
+# Nathan's Common Config -*- mode:autoconf -*-
+# Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+
+
+# thanks to Zack Weinberg for fixing this!
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ac_aux_dir=
+for ac_dir in build-aux "$srcdir"/build-aux; do
+ if test -f "$ac_dir/install-sh"; then
+ ac_aux_dir=$ac_dir
+ ac_install_sh="$ac_aux_dir/install-sh -c"
+ break
+ elif test -f "$ac_dir/install.sh"; then
+ ac_aux_dir=$ac_dir
+ ac_install_sh="$ac_aux_dir/install.sh -c"
+ break
+ elif test -f "$ac_dir/shtool"; then
+ ac_aux_dir=$ac_dir
+ ac_install_sh="$ac_aux_dir/shtool install -c"
+ break
+ fi
+done
+if test -z "$ac_aux_dir"; then
+ as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5
+fi
+
+# These three variables are undocumented and unsupported,
+# and are intended to be withdrawn in a future Autoconf release.
+# They can cause serious problems if a builder's source tree is in a directory
+# whose full name contains unusual characters.
+ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var.
+ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var.
+ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var.
+
+
+
+
+if test -e configure ; then
+as_fn_error $? "Do not build in the source tree. Reasons" "$LINENO" 5
+fi
+# Make sure we can run config.sub.
+$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
+ as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
+$as_echo_n "checking build system type... " >&6; }
+if ${ac_cv_build+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ ac_build_alias=$build_alias
+test "x$ac_build_alias" = x &&
+ ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
+test "x$ac_build_alias" = x &&
+ as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
+ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
+ as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
+$as_echo "$ac_cv_build" >&6; }
+case $ac_cv_build in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
+esac
+build=$ac_cv_build
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_build
+shift
+build_cpu=$1
+build_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+build_os=$*
+IFS=$ac_save_IFS
+case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
+$as_echo_n "checking host system type... " >&6; }
+if ${ac_cv_host+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test "x$host_alias" = x; then
+ ac_cv_host=$ac_cv_build
+else
+ ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
+ as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
+$as_echo "$ac_cv_host" >&6; }
+case $ac_cv_host in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
+esac
+host=$ac_cv_host
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_host
+shift
+host_cpu=$1
+host_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+host_os=$*
+IFS=$ac_save_IFS
+case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
+
+
+
+
+
+# Check whether --with-tools was given.
+if test "${with_tools+set}" = set; then :
+ withval=$with_tools; case $withval in #(
+ yes) :
+ as_fn_error $? "--with-tools requires an argument" "$LINENO" 5 ;; #(
+ no) :
+ : ;; #(
+ *) :
+ tools="${withval%/bin}" ;;
+esac
+fi
+
+
+if test -n "$tools" ; then
+ if test -d "$tools/bin"; then
+ PATH="$tools/bin:$PATH"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: Using tools in $tools" >&5
+$as_echo "$as_me: Using tools in $tools" >&6;}
+ else
+ as_fn_error $? "tool location does not exist" "$LINENO" 5
+ fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking number of CPUs" >&5
+$as_echo_n "checking number of CPUs... " >&6; }
+case $build in #(
+ *-*-darwin*) :
+ NUM_CPUS=$(sysctl -n hw.ncpu 2>/dev/null) ;; #(
+ *) :
+ NUM_CPUS=$(grep -c '^processor' /proc/cpuinfo 2>/dev/null) ;;
+esac
+test "$NUM_CPUS" = 0 && NUM_CPUS=
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: ${NUM_CPUS:-unknown}" >&5
+$as_echo "${NUM_CPUS:-unknown}" >&6; }
+test "$NUM_CPUS" = 1 && NUM_CPUS=
+
+# Check whether --enable-maintainer-mode was given.
+if test "${enable_maintainer_mode+set}" = set; then :
+ enableval=$enable_maintainer_mode;
+else
+ enable_maintainer_mode=no
+fi
+
+case $enable_maintainer_mode in #(
+ yes) :
+ maintainer_mode=yes ;; #(
+ no) :
+    maintainer_mode=no ;; #(
+ *) :
+ as_fn_error $? "unknown maintainer mode $enable_maintainer_mode" "$LINENO" 5 ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking maintainer-mode" >&5
+$as_echo_n "checking maintainer-mode... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $maintainer_mode" >&5
+$as_echo "$maintainer_mode" >&6; }
+test "$maintainer_mode" = yes && MAINTAINER=yes
+
+
+# Check whether --with-compiler was given.
+if test "${with_compiler+set}" = set; then :
+ withval=$with_compiler; { $as_echo "$as_me:${as_lineno-$LINENO}: checking C++ compiler" >&5
+$as_echo_n "checking C++ compiler... " >&6; }
+if test "$withval" = "yes" ; then
+ as_fn_error $? "NAME not specified" "$LINENO" 5
+elif test "$withval" = "no" ; then
+ as_fn_error $? "Gonna need a C++ compiler!" "$LINENO" 5
+else
+ CXX="${withval}"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5
+$as_echo "$CXX" >&6; }
+fi
+fi
+
+ac_ext=cpp
+ac_cpp='$CXXCPP $CPPFLAGS'
+ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
+
+ac_ext=cpp
+ac_cpp='$CXXCPP $CPPFLAGS'
+ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
+if test -z "$CXX"; then
+ if test -n "$CCC"; then
+ CXX=$CCC
+ else
+ if test -n "$ac_tool_prefix"; then
+ for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
+ do
+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CXX+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$CXX"; then
+ ac_cv_prog_CXX="$CXX" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_CXX="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+CXX=$ac_cv_prog_CXX
+if test -n "$CXX"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5
+$as_echo "$CXX" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$CXX" && break
+ done
+fi
+if test -z "$CXX"; then
+ ac_ct_CXX=$CXX
+ for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CXX+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ac_ct_CXX"; then
+ ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_ac_ct_CXX="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CXX=$ac_cv_prog_ac_ct_CXX
+if test -n "$ac_ct_CXX"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5
+$as_echo "$ac_ct_CXX" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$ac_ct_CXX" && break
+done
+
+ if test "x$ac_ct_CXX" = x; then
+ CXX="g++"
+ else
+ case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+ CXX=$ac_ct_CXX
+ fi
+fi
+
+ fi
+fi
+# Provide some information about the compiler.
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5
+set X $ac_compile
+ac_compiler=$2
+for ac_option in --version -v -V -qversion; do
+ { { ac_try="$ac_compiler $ac_option >&5"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_compiler $ac_option >&5") 2>conftest.err
+ ac_status=$?
+ if test -s conftest.err; then
+ sed '10a\
+... rest of stderr output deleted ...
+ 10q' conftest.err >conftest.er1
+ cat conftest.er1 >&5
+ fi
+ rm -f conftest.er1 conftest.err
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }
+done
+
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
+# Try to create an executable without -o first, disregard a.out.
+# It will help us diagnose broken compilers and get an intuition of exeext.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C++ compiler works" >&5
+$as_echo_n "checking whether the C++ compiler works... " >&6; }
+ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
+
+# The possible output files:
+ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
+
+ac_rmfiles=
+for ac_file in $ac_files
+do
+ case $ac_file in
+ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+ * ) ac_rmfiles="$ac_rmfiles $ac_file";;
+ esac
+done
+rm -f $ac_rmfiles
+
+if { { ac_try="$ac_link_default"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_link_default") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; then :
+ # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
+# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
+# in a Makefile. We should not override ac_cv_exeext if it was cached,
+# so that the user can short-circuit this test for compilers unknown to
+# Autoconf.
+for ac_file in $ac_files ''
+do
+ test -f "$ac_file" || continue
+ case $ac_file in
+ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
+ ;;
+ [ab].out )
+ # We found the default executable, but exeext='' is most
+ # certainly right.
+ break;;
+ *.* )
+ if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
+ then :; else
+ ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+ fi
+ # We set ac_cv_exeext here because the later test for it is not
+ # safe: cross compilers may not add the suffix if given an `-o'
+ # argument, so we may need to know it at that point already.
+ # Even if this section looks crufty: it has the advantage of
+ # actually working.
+ break;;
+ * )
+ break;;
+ esac
+done
+test "$ac_cv_exeext" = no && ac_cv_exeext=
+
+else
+ ac_file=''
+fi
+if test -z "$ac_file"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+$as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error 77 "C++ compiler cannot create executables
+See \`config.log' for more details" "$LINENO" 5; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler default output file name" >&5
+$as_echo_n "checking for C++ compiler default output file name... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
+$as_echo "$ac_file" >&6; }
+ac_exeext=$ac_cv_exeext
+
+rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
+$as_echo_n "checking for suffix of executables... " >&6; }
+if { { ac_try="$ac_link"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_link") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; then :
+ # If both `conftest.exe' and `conftest' are `present' (well, observable)
+# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will
+# work properly (i.e., refer to `conftest.exe'), while it won't with
+# `rm'.
+for ac_file in conftest.exe conftest conftest.*; do
+ test -f "$ac_file" || continue
+ case $ac_file in
+ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+ *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+ break;;
+ * ) break;;
+ esac
+done
+else
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of executables: cannot compile and link
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest conftest$ac_cv_exeext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
+$as_echo "$ac_cv_exeext" >&6; }
+
+rm -f conftest.$ac_ext
+EXEEXT=$ac_cv_exeext
+ac_exeext=$EXEEXT
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <stdio.h>
+int
+main ()
+{
+FILE *f = fopen ("conftest.out", "w");
+ return ferror (f) || fclose (f) != 0;
+
+ ;
+ return 0;
+}
+_ACEOF
+ac_clean_files="$ac_clean_files conftest.out"
+# Check that the compiler produces executables we can run. If not, either
+# the compiler is broken, or we cross compile.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
+$as_echo_n "checking whether we are cross compiling... " >&6; }
+if test "$cross_compiling" != yes; then
+ { { ac_try="$ac_link"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_link") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }
+ if { ac_try='./conftest$ac_cv_exeext'
+ { { case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_try") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; }; then
+ cross_compiling=no
+ else
+ if test "$cross_compiling" = maybe; then
+ cross_compiling=yes
+ else
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot run C++ compiled programs.
+If you meant to cross compile, use \`--host'.
+See \`config.log' for more details" "$LINENO" 5; }
+ fi
+ fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
+$as_echo "$cross_compiling" >&6; }
+
+rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
+$as_echo_n "checking for suffix of object files... " >&6; }
+if ${ac_cv_objext+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+rm -f conftest.o conftest.obj
+if { { ac_try="$ac_compile"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_compile") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; then :
+ for ac_file in conftest.o conftest.obj conftest.*; do
+ test -f "$ac_file" || continue;
+ case $ac_file in
+ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
+ *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
+ break;;
+ esac
+done
+else
+ $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of object files: cannot compile
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest.$ac_cv_objext conftest.$ac_ext
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
+$as_echo "$ac_cv_objext" >&6; }
+OBJEXT=$ac_cv_objext
+ac_objext=$OBJEXT
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5
+$as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; }
+if ${ac_cv_cxx_compiler_gnu+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+#ifndef __GNUC__
+ choke me
+#endif
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+ ac_compiler_gnu=yes
+else
+ ac_compiler_gnu=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ac_cv_cxx_compiler_gnu=$ac_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5
+$as_echo "$ac_cv_cxx_compiler_gnu" >&6; }
+if test $ac_compiler_gnu = yes; then
+ GXX=yes
+else
+ GXX=
+fi
+ac_test_CXXFLAGS=${CXXFLAGS+set}
+ac_save_CXXFLAGS=$CXXFLAGS
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5
+$as_echo_n "checking whether $CXX accepts -g... " >&6; }
+if ${ac_cv_prog_cxx_g+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ ac_save_cxx_werror_flag=$ac_cxx_werror_flag
+ ac_cxx_werror_flag=yes
+ ac_cv_prog_cxx_g=no
+ CXXFLAGS="-g"
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+ ac_cv_prog_cxx_g=yes
+else
+ CXXFLAGS=""
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+
+else
+ ac_cxx_werror_flag=$ac_save_cxx_werror_flag
+ CXXFLAGS="-g"
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+ ac_cv_prog_cxx_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ ac_cxx_werror_flag=$ac_save_cxx_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5
+$as_echo "$ac_cv_prog_cxx_g" >&6; }
+if test "$ac_test_CXXFLAGS" = set; then
+ CXXFLAGS=$ac_save_CXXFLAGS
+elif test $ac_cv_prog_cxx_g = yes; then
+ if test "$GXX" = yes; then
+ CXXFLAGS="-g -O2"
+ else
+ CXXFLAGS="-g"
+ fi
+else
+ if test "$GXX" = yes; then
+ CXXFLAGS="-O2"
+ else
+ CXXFLAGS=
+ fi
+fi
+ac_ext=cpp
+ac_cpp='$CXXCPP $CPPFLAGS'
+ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX is for C++11" >&5
+$as_echo_n "checking whether $CXX is for C++11... " >&6; }
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+#if __cplusplus != 201103
+#error "C++11 is required"
+#endif
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+ CXX_ORIG="$CXX"
+CXX+=" -std=c++11"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+#if __cplusplus != 201103
+#error "C++11 is required"
+#endif
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: adding -std=c++11" >&5
+$as_echo "adding -std=c++11" >&6; }
+else
+ CXX="$CXX_ORIG"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+#if __cplusplus < 201103
+#error "C++11 is required"
+#endif
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_compile "$LINENO"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: > C++11" >&5
+$as_echo "> C++11" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+as_fn_error $? "C++11 is required" "$LINENO" 5
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+unset CXX_ORIG
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+if test "$tools" && test -d "$tools/include" ; then
+ CXX+=" -I$tools/include"
+fi
+if test "$tools" && test -d "$tools/lib" ; then
+ toollib="$tools/lib"
+  if os=$($CXX -print-multi-os-directory 2>/dev/null) ; then
+ toollib+="/${os}"
+ fi
+ LDFLAGS+=" -L $toollib"
+ unset toollib
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking adding -Wl,--no-undefined to linker" >&5
+$as_echo_n "checking adding -Wl,--no-undefined to linker... " >&6; }
+ORIG_LDFLAGS="$LDFLAGS"
+LDFLAGS+=" -Wl,--no-undefined"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_cxx_try_link "$LINENO"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
+$as_echo "ok" >&6; }
+else
+ LDFLAGS="$ORIG_LDFLAGS"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+unset ORIG_LDFLAGS
+CONFIG_FILES="Makefile gdbinit dox.cfg"
+SUBDIRS=""
+for generated in config.h.in configure ; do
+ if test $srcdir/configure.ac -nt $srcdir/$generated ; then
+ touch $srcdir/$generated
+ fi
+done
+for dir in . $SUBDIRS
+do
+ CONFIG_FILES+=" $dir/Makesub"
+ test -f ${srcdir}/$dir/tests/Makesub.in && CONFIG_FILES+=" $dir/tests/Makesub"
+done
+ac_config_files="$ac_config_files $CONFIG_FILES"
+
+configure_args=$ac_configure_args
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking bugurl" >&5
+$as_echo_n "checking bugurl... " >&6; }
+
+# Check whether --with-bugurl was given.
+if test "${with_bugurl+set}" = set; then :
+ withval=$with_bugurl; case "$withval" in #(
+ yes) :
+ as_fn_error $? "--with-bugurl requires an argument" "$LINENO" 5 ;; #(
+ no) :
+ BUGURL="" ;; #(
+ *) :
+ BUGURL="${withval}" ;;
+esac
+else
+ BUGURL="${PACKAGE_BUGREPORT}"
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $BUGURL" >&5
+$as_echo "$BUGURL" >&6; }
+
+cat >>confdefs.h <<_ACEOF
+#define BUGURL "$BUGURL"
+_ACEOF
+
+# Check whether --enable-checking was given.
+if test "${enable_checking+set}" = set; then :
+ enableval=$enable_checking;
+else
+ enable_checking="yes"
+fi
+
+case $enable_checking in #(
+ yes|all|yes,*) :
+ nms_checking=yes ;; #(
+ no|none|release) :
+ nms_checking= ;; #(
+ *) :
+ as_fn_error $? "unknown check \"$enable_checking\"" "$LINENO" 5 ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking checking" >&5
+$as_echo_n "checking checking... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: ${nms_checking:-no}" >&5
+$as_echo "${nms_checking:-no}" >&6; }
+if test "$nms_checking" = yes ; then
+
+cat >>confdefs.h <<_ACEOF
+#define NMS_CHECKING 0${nms_checking:+1}
+_ACEOF
+
+fi
+# Check whether --enable-exceptions was given.
+if test "${enable_exceptions+set}" = set; then :
+ enableval=$enable_exceptions;
+else
+ enable_exceptions="no"
+fi
+
+case $enable_exceptions in #(
+ yes) :
+ nms_exceptions=yes ;; #(
+ no) :
+ nms_exceptions=no ;; #(
+ *) :
+ as_fn_error $? "unknown exceptions $enable_exceptions" "$LINENO" 5 ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking exceptions" >&5
+$as_echo_n "checking exceptions... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $nms_exceptions" >&5
+$as_echo "$nms_exceptions" >&6; }
+if test "$nms_exceptions" != no ; then
+ EXCEPTIONS=yes
+fi
+
+
+ac_config_headers="$ac_config_headers config.h"
+
+if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ar; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AR+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$AR"; then
+ ac_cv_prog_AR="$AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_AR="${ac_tool_prefix}ar"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+AR=$ac_cv_prog_AR
+if test -n "$AR"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
+$as_echo "$AR" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_AR"; then
+ ac_ct_AR=$AR
+ # Extract the first word of "ar", so it can be a program name with args.
+set dummy ar; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_AR+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ac_ct_AR"; then
+ ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_ac_ct_AR="ar"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_AR=$ac_cv_prog_ac_ct_AR
+if test -n "$ac_ct_AR"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
+$as_echo "$ac_ct_AR" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+ if test "x$ac_ct_AR" = x; then
+ AR=""
+ else
+ case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+ AR=$ac_ct_AR
+ fi
+else
+ AR="$ac_cv_prog_AR"
+fi
+
+# Extract the first word of "doxygen", so it can be a program name with args.
+set dummy doxygen; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DOXYGEN+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$DOXYGEN"; then
+ ac_cv_prog_DOXYGEN="$DOXYGEN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_DOXYGEN="doxygen"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ test -z "$ac_cv_prog_DOXYGEN" && ac_cv_prog_DOXYGEN=": NOTdoxygen"
+fi
+fi
+DOXYGEN=$ac_cv_prog_DOXYGEN
+if test -n "$DOXYGEN"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DOXYGEN" >&5
+$as_echo "$DOXYGEN" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+# Extract the first word of "aloy", so it can be a program name with args.
+set dummy aloy; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ALOY+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ALOY"; then
+ ac_cv_prog_ALOY="$ALOY" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_ALOY="aloy"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ test -z "$ac_cv_prog_ALOY" && ac_cv_prog_ALOY=": Joust testsuite missing"
+fi
+fi
+ALOY=$ac_cv_prog_ALOY
+if test -n "$ALOY"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ALOY" >&5
+$as_echo "$ALOY" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+
+
+
+cat >confcache <<\_ACEOF
+# This file is a shell script that caches the results of configure
+# tests run on this system so they can be shared between configure
+# scripts and configure runs, see configure's option --config-cache.
+# It is not useful on other systems. If it contains results you don't
+# want to keep, you may remove or edit it.
+#
+# config.status only pays attention to the cache file if you give it
+# the --recheck option to rerun configure.
+#
+# `ac_cv_env_foo' variables (set or unset) will be overridden when
+# loading this file, other *unset* `ac_cv_foo' will be assigned the
+# following values.
+
+_ACEOF
+
+# The following way of writing the cache mishandles newlines in values,
+# but we know of no workaround that is simple, portable, and efficient.
+# So, we kill variables containing newlines.
+# Ultrix sh set writes to stderr and can't be redirected directly,
+# and sets the high bit in the cache file unless we assign to the vars.
+(
+ for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
+ eval ac_val=\$$ac_var
+ case $ac_val in #(
+ *${as_nl}*)
+ case $ac_var in #(
+ *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+ esac
+ case $ac_var in #(
+ _ | IFS | as_nl) ;; #(
+ BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+ *) { eval $ac_var=; unset $ac_var;} ;;
+ esac ;;
+ esac
+ done
+
+ (set) 2>&1 |
+ case $as_nl`(ac_space=' '; set) 2>&1` in #(
+ *${as_nl}ac_space=\ *)
+ # `set' does not quote correctly, so add quotes: double-quote
+ # substitution turns \\\\ into \\, and sed turns \\ into \.
+ sed -n \
+ "s/'/'\\\\''/g;
+ s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
+ ;; #(
+ *)
+ # `set' quotes correctly as required by POSIX, so do not add quotes.
+ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+ ;;
+ esac |
+ sort
+) |
+ sed '
+ /^ac_cv_env_/b end
+ t clear
+ :clear
+ s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
+ t end
+ s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
+ :end' >>confcache
+if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
+ if test -w "$cache_file"; then
+ if test "x$cache_file" != "x/dev/null"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
+$as_echo "$as_me: updating cache $cache_file" >&6;}
+ if test ! -f "$cache_file" || test -h "$cache_file"; then
+ cat confcache >"$cache_file"
+ else
+ case $cache_file in #(
+ */* | ?:*)
+ mv -f confcache "$cache_file"$$ &&
+ mv -f "$cache_file"$$ "$cache_file" ;; #(
+ *)
+ mv -f confcache "$cache_file" ;;
+ esac
+ fi
+ fi
+ else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
+$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
+ fi
+fi
+rm -f confcache
+
+test "x$prefix" = xNONE && prefix=$ac_default_prefix
+# Let make expand exec_prefix.
+test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
+
+DEFS=-DHAVE_CONFIG_H
+
+ac_libobjs=
+ac_ltlibobjs=
+U=
+for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
+ # 1. Remove the extension, and $U if already installed.
+ ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
+ ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
+ # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR
+ # will be set to the directory where LIBOBJS objects are built.
+ as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
+ as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
+done
+LIBOBJS=$ac_libobjs
+
+LTLIBOBJS=$ac_ltlibobjs
+
+
+
+: "${CONFIG_STATUS=./config.status}"
+ac_write_fail=0
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files $CONFIG_STATUS"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
+$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
+as_write_fail=0
+cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+# Run this file to recreate the current configuration.
+# Compiler output produced by configure, useful for debugging
+# configure, is in config.log if it exists.
+
+debug=false
+ac_cs_recheck=false
+ac_cs_silent=false
+
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+ emulate sh
+ NULLCMD=:
+ # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '${1+"$@"}'='"$@"'
+ setopt NO_GLOB_SUBST
+else
+ case `(set -o) 2>/dev/null` in #(
+ *posix*) :
+ set -o posix ;; #(
+ *) :
+ ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='print -r --'
+ as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='printf %s\n'
+ as_echo_n='printf %s'
+else
+ if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+ as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+ as_echo_n='/usr/ucb/echo -n'
+ else
+ as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+ as_echo_n_body='eval
+ arg=$1;
+ case $arg in #(
+ *"$as_nl"*)
+ expr "X$arg" : "X\\(.*\\)$as_nl";
+ arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+ esac;
+ expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+ '
+ export as_echo_n_body
+ as_echo_n='sh -c $as_echo_n_body as_echo'
+ fi
+ export as_echo_body
+ as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+ PATH_SEPARATOR=:
+ (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+ (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+ PATH_SEPARATOR=';'
+ }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order. Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" "" $as_nl"
+
+# Find who we are. Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+ *[\\/]* ) as_myself=$0 ;;
+ *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+ done
+IFS=$as_save_IFS
+
+ ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+ as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+ $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+ exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there. '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+ as_status=$1; test $as_status -eq 0 && as_status=1
+ if test "$4"; then
+ as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+ $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+ fi
+ $as_echo "$as_me: error: $2" >&2
+ as_fn_exit $as_status
+} # as_fn_error
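+
+# For example, a call such as
+#
+#   as_fn_error 77 "C++ compiler cannot create executables" "$LINENO" 5
+#
+# logs the message against the caller's line number on fd 5 (config.log),
+# echoes it to stderr, and exits the script with status 77.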
+
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+ return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+ set +e
+ as_fn_set_status $1
+ exit $1
+} # as_fn_exit
+
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+ { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+ eval 'as_fn_append ()
+ {
+ eval $1+=\$2
+ }'
+else
+ as_fn_append ()
+ {
+ eval $1=\$$1\$2
+ }
+fi # as_fn_append
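+
+# For example, `as_fn_append ac_configure_args " '$ac_arg'"' (as used when
+# recording the configure command line) becomes `ac_configure_args+=...' on
+# shells supporting `+=', avoiding the quadratic cost of `var=$var$value'.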
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+ eval 'as_fn_arith ()
+ {
+ as_val=$(( $* ))
+ }'
+else
+ as_fn_arith ()
+ {
+ as_val=`expr "$@" || test $? -eq 1`
+ }
+fi # as_fn_arith
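+
+# For example, `as_fn_arith 2 + 3' leaves 5 in $as_val, using $((...))
+# where the shell supports it and falling back to `expr' otherwise.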
+
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+ test "X`expr 00001 : '.*\(...\)'`" = X001; then
+ as_expr=expr
+else
+ as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+ as_basename=basename
+else
+ as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+ as_dirname=dirname
+else
+ as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+ X"$0" : 'X\(//\)$' \| \
+ X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+ sed '/^.*\/\([^/][^/]*\)\/*$/{
+ s//\1/
+ q
+ }
+ /^X\/\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\/\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+ case `echo 'xy\c'` in
+ *c*) ECHO_T=' ';; # ECHO_T is single tab character.
+ xy) ECHO_C='\c';;
+ *) echo `echo ksh88 bug on AIX 6.1` > /dev/null
+ ECHO_T=' ';;
+ esac;;
+*)
+ ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+ rm -f conf$$.dir/conf$$.file
+else
+ rm -f conf$$.dir
+ mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+ if ln -s conf$$.file conf$$ 2>/dev/null; then
+ as_ln_s='ln -s'
+ # ... but there are two gotchas:
+ # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+ # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+ # In both cases, we have to default to `cp -pR'.
+ ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+ as_ln_s='cp -pR'
+ elif ln conf$$.file conf$$ 2>/dev/null; then
+ as_ln_s=ln
+ else
+ as_ln_s='cp -pR'
+ fi
+else
+ as_ln_s='cp -pR'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+ case $as_dir in #(
+ -*) as_dir=./$as_dir;;
+ esac
+ test -d "$as_dir" || eval $as_mkdir_p || {
+ as_dirs=
+ while :; do
+ case $as_dir in #(
+ *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+ *) as_qdir=$as_dir;;
+ esac
+ as_dirs="'$as_qdir' $as_dirs"
+ as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+ X"$as_dir" : 'X\(//\)[^/]' \| \
+ X"$as_dir" : 'X\(//\)$' \| \
+ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)[^/].*/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+ test -d "$as_dir" && break
+ done
+ test -z "$as_dirs" || eval "mkdir $as_dirs"
+ } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
+if mkdir -p . 2>/dev/null; then
+ as_mkdir_p='mkdir -p "$as_dir"'
+else
+ test -d ./-p && rmdir ./-p
+ as_mkdir_p=false
+fi
+
+
+# as_fn_executable_p FILE
+# -----------------------
+# Test if FILE is an executable regular file.
+as_fn_executable_p ()
+{
+ test -f "$1" && test -x "$1"
+} # as_fn_executable_p
+as_test_x='test -x'
+as_executable_p=as_fn_executable_p
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+
+exec 6>&1
+## ----------------------------------- ##
+## Main body of $CONFIG_STATUS script. ##
+## ----------------------------------- ##
+_ASEOF
+test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# Save the log message, to keep $0 and so on meaningful, and to
+# report actual input values of CONFIG_FILES etc. instead of their
+# values after options handling.
+ac_log="
+This file was extended by codylib $as_me 0.0, which was
+generated by GNU Autoconf 2.69. Invocation command line was
+
+ CONFIG_FILES = $CONFIG_FILES
+ CONFIG_HEADERS = $CONFIG_HEADERS
+ CONFIG_LINKS = $CONFIG_LINKS
+ CONFIG_COMMANDS = $CONFIG_COMMANDS
+ $ $0 $@
+
+on `(hostname || uname -n) 2>/dev/null | sed 1q`
+"
+
+_ACEOF
+
+case $ac_config_files in *"
+"*) set x $ac_config_files; shift; ac_config_files=$*;;
+esac
+
+case $ac_config_headers in *"
+"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
+esac
+
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+# Files that config.status was made for.
+config_files="$ac_config_files"
+config_headers="$ac_config_headers"
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+ac_cs_usage="\
+\`$as_me' instantiates files and other configuration actions
+from templates according to the current configuration. Unless the files
+and actions are specified as TAGs, all are instantiated by default.
+
+Usage: $0 [OPTION]... [TAG]...
+
+ -h, --help print this help, then exit
+ -V, --version print version number and configuration settings, then exit
+ --config print configuration, then exit
+ -q, --quiet, --silent
+ do not print progress messages
+ -d, --debug don't remove temporary files
+ --recheck update $as_me by reconfiguring in the same conditions
+ --file=FILE[:TEMPLATE]
+ instantiate the configuration file FILE
+ --header=FILE[:TEMPLATE]
+ instantiate the configuration header FILE
+
+Configuration files:
+$config_files
+
+Configuration headers:
+$config_headers
+
+Report bugs to <github.com/urnathan/libcody>."
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
+ac_cs_version="\\
+codylib config.status 0.0
+configured by $0, generated by GNU Autoconf 2.69,
+ with options \\"\$ac_cs_config\\"
+
+Copyright (C) 2012 Free Software Foundation, Inc.
+This config.status script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+ac_pwd='$ac_pwd'
+srcdir='$srcdir'
+test -n "\$AWK" || AWK=awk
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# The default lists apply if the user does not specify any file.
+ac_need_defaults=:
+while test $# != 0
+do
+ case $1 in
+ --*=?*)
+ ac_option=`expr "X$1" : 'X\([^=]*\)='`
+ ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
+ ac_shift=:
+ ;;
+ --*=)
+ ac_option=`expr "X$1" : 'X\([^=]*\)='`
+ ac_optarg=
+ ac_shift=:
+ ;;
+ *)
+ ac_option=$1
+ ac_optarg=$2
+ ac_shift=shift
+ ;;
+ esac
+
+ case $ac_option in
+ # Handling of the options.
+ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
+ ac_cs_recheck=: ;;
+ --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
+ $as_echo "$ac_cs_version"; exit ;;
+ --config | --confi | --conf | --con | --co | --c )
+ $as_echo "$ac_cs_config"; exit ;;
+ --debug | --debu | --deb | --de | --d | -d )
+ debug=: ;;
+ --file | --fil | --fi | --f )
+ $ac_shift
+ case $ac_optarg in
+ *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+ '') as_fn_error $? "missing file argument" ;;
+ esac
+ as_fn_append CONFIG_FILES " '$ac_optarg'"
+ ac_need_defaults=false;;
+ --header | --heade | --head | --hea )
+ $ac_shift
+ case $ac_optarg in
+ *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+ esac
+ as_fn_append CONFIG_HEADERS " '$ac_optarg'"
+ ac_need_defaults=false;;
+ --he | --h)
+ # Conflict between --help and --header
+ as_fn_error $? "ambiguous option: \`$1'
+Try \`$0 --help' for more information.";;
+ --help | --hel | -h )
+ $as_echo "$ac_cs_usage"; exit ;;
+ -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+ | -silent | --silent | --silen | --sile | --sil | --si | --s)
+ ac_cs_silent=: ;;
+
+ # This is an error.
+ -*) as_fn_error $? "unrecognized option: \`$1'
+Try \`$0 --help' for more information." ;;
+
+ *) as_fn_append ac_config_targets " $1"
+ ac_need_defaults=false ;;
+
+ esac
+ shift
+done
+
+ac_configure_extra_args=
+
+if $ac_cs_silent; then
+ exec 6>/dev/null
+ ac_configure_extra_args="$ac_configure_extra_args --silent"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+if \$ac_cs_recheck; then
+ set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
+ shift
+ \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
+ CONFIG_SHELL='$SHELL'
+ export CONFIG_SHELL
+ exec "\$@"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+exec 5>>config.log
+{
+ echo
+ sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
+## Running $as_me. ##
+_ASBOX
+ $as_echo "$ac_log"
+} >&5
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+
+# Handling of arguments.
+for ac_config_target in $ac_config_targets
+do
+ case $ac_config_target in
+ "$CONFIG_FILES") CONFIG_FILES="$CONFIG_FILES $CONFIG_FILES" ;;
+ "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;;
+
+ *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
+ esac
+done
+
+
+# If the user did not use the arguments to specify the items to instantiate,
+# then the envvar interface is used. Set only those that are not.
+# We use the long form for the default assignment because of an extremely
+# bizarre bug on SunOS 4.1.3.
+if $ac_need_defaults; then
+ test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
+ test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
+fi
+
+# Have a temporary directory for convenience. Make it in the build tree
+# simply because there is no reason against having it here, and in addition,
+# creating and moving files from /tmp can sometimes cause problems.
+# Hook for its removal unless debugging.
+# Note that there is a small window in which the directory will not be cleaned:
+# after its creation but before its name has been assigned to `$tmp'.
+$debug ||
+{
+ tmp= ac_tmp=
+ trap 'exit_status=$?
+ : "${ac_tmp:=$tmp}"
+ { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
+' 0
+ trap 'as_fn_exit 1' 1 2 13 15
+}
+# Create a (secure) tmp directory for tmp files.
+
+{
+ tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
+ test -d "$tmp"
+} ||
+{
+ tmp=./conf$$-$RANDOM
+ (umask 077 && mkdir "$tmp")
+} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
+ac_tmp=$tmp
+
+# Set up the scripts for CONFIG_FILES section.
+# No need to generate them if there are no CONFIG_FILES.
+# This happens for instance with `./config.status config.h'.
+if test -n "$CONFIG_FILES"; then
+
+
+ac_cr=`echo X | tr X '\015'`
+# On cygwin, bash can eat \r inside `` if the user requested igncr.
+# But we know of no other shell where ac_cr would be empty at this
+# point, so we can use a bashism as a fallback.
+if test "x$ac_cr" = x; then
+ eval ac_cr=\$\'\\r\'
+fi
+ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
+if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
+ ac_cs_awk_cr='\\r'
+else
+ ac_cs_awk_cr=$ac_cr
+fi
+
+echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+_ACEOF
+
+
+{
+ echo "cat >conf$$subs.awk <<_ACEOF" &&
+ echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
+ echo "_ACEOF"
+} >conf$$subs.sh ||
+ as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+ac_delim='%!_!# '
+for ac_last_try in false false false false false :; do
+ . ./conf$$subs.sh ||
+ as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+
+ ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
+ if test $ac_delim_n = $ac_delim_num; then
+ break
+ elif $ac_last_try; then
+ as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ else
+ ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+ fi
+done
+rm -f conf$$subs.sh
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+_ACEOF
+sed -n '
+h
+s/^/S["/; s/!.*/"]=/
+p
+g
+s/^[^!]*!//
+:repl
+t repl
+s/'"$ac_delim"'$//
+t delim
+:nl
+h
+s/\(.\{148\}\)..*/\1/
+t more1
+s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
+p
+n
+b repl
+:more1
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t nl
+:delim
+h
+s/\(.\{148\}\)..*/\1/
+t more2
+s/["\\]/\\&/g; s/^/"/; s/$/"/
+p
+b
+:more2
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t delim
+' <conf$$subs.awk | sed '
+/^[^""]/{
+ N
+ s/\n//
+}
+' >>$CONFIG_STATUS || ac_write_fail=1
+rm -f conf$$subs.awk
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACAWK
+cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+ for (key in S) S_is_set[key] = 1
+ FS = ""
+
+}
+{
+ line = $ 0
+ nfields = split(line, field, "@")
+ substed = 0
+ len = length(field[1])
+ for (i = 2; i < nfields; i++) {
+ key = field[i]
+ keylen = length(key)
+ if (S_is_set[key]) {
+ value = S[key]
+ line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
+ len += length(value) + length(field[++i])
+ substed = 1
+ } else
+ len += 1 + keylen
+ }
+
+ print line
+}
+
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
+ sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
+else
+ cat
+fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
+ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
+_ACEOF
+
+# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
+# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
+# trailing colons and then remove the whole line if VPATH becomes empty
+# (actually we leave an empty line to preserve line numbers).
+if test "x$srcdir" = x.; then
+ ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{
+h
+s///
+s/^/:/
+s/[ ]*$/:/
+s/:\$(srcdir):/:/g
+s/:\${srcdir}:/:/g
+s/:@srcdir@:/:/g
+s/^:*//
+s/:*$//
+x
+s/\(=[ ]*\).*/\1/
+G
+s/\n//
+s/^[^=]*=[ ]*$//
+}'
+fi
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+fi # test -n "$CONFIG_FILES"
+
+# Set up the scripts for CONFIG_HEADERS section.
+# No need to generate them if there are no CONFIG_HEADERS.
+# This happens for instance with `./config.status Makefile'.
+if test -n "$CONFIG_HEADERS"; then
+cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
+BEGIN {
+_ACEOF
+
+# Transform confdefs.h into an awk script `defines.awk', embedded as
+# here-document in config.status, that substitutes the proper values into
+# config.h.in to produce config.h.
+
+# Create a delimiter string that does not exist in confdefs.h, to ease
+# handling of long lines.
+ac_delim='%!_!# '
+for ac_last_try in false false :; do
+ ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
+ if test -z "$ac_tt"; then
+ break
+ elif $ac_last_try; then
+ as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
+ else
+ ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+ fi
+done
+
+# For the awk script, D is an array of macro values keyed by name,
+# likewise P contains macro parameters if any. Preserve backslash
+# newline sequences.
+
+ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
+sed -n '
+s/.\{148\}/&'"$ac_delim"'/g
+t rset
+:rset
+s/^[ ]*#[ ]*define[ ][ ]*/ /
+t def
+d
+:def
+s/\\$//
+t bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3"/p
+s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p
+d
+:bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3\\\\\\n"\\/p
+t cont
+s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
+t cont
+d
+:cont
+n
+s/.\{148\}/&'"$ac_delim"'/g
+t clear
+:clear
+s/\\$//
+t bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/"/p
+d
+:bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
+b cont
+' <confdefs.h | sed '
+s/'"$ac_delim"'/"\\\
+"/g' >>$CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ for (key in D) D_is_set[key] = 1
+ FS = ""
+}
+/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
+ line = \$ 0
+ split(line, arg, " ")
+ if (arg[1] == "#") {
+ defundef = arg[2]
+ mac1 = arg[3]
+ } else {
+ defundef = substr(arg[1], 2)
+ mac1 = arg[2]
+ }
+ split(mac1, mac2, "(") #)
+ macro = mac2[1]
+ prefix = substr(line, 1, index(line, defundef) - 1)
+ if (D_is_set[macro]) {
+ # Preserve the white space surrounding the "#".
+ print prefix "define", macro P[macro] D[macro]
+ next
+ } else {
+ # Replace #undef with comments. This is necessary, for example,
+ # in the case of _POSIX_SOURCE, which is predefined and required
+ # on some systems where configure will not decide to define it.
+ if (defundef == "undef") {
+ print "/*", prefix defundef, macro, "*/"
+ next
+ }
+ }
+}
+{ print }
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+ as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
+fi # test -n "$CONFIG_HEADERS"
+
+
+eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS "
+shift
+for ac_tag
+do
+ case $ac_tag in
+ :[FHLC]) ac_mode=$ac_tag; continue;;
+ esac
+ case $ac_mode$ac_tag in
+ :[FHL]*:*);;
+ :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
+ :[FH]-) ac_tag=-:-;;
+ :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
+ esac
+ ac_save_IFS=$IFS
+ IFS=:
+ set x $ac_tag
+ IFS=$ac_save_IFS
+ shift
+ ac_file=$1
+ shift
+
+ case $ac_mode in
+ :L) ac_source=$1;;
+ :[FH])
+ ac_file_inputs=
+ for ac_f
+ do
+ case $ac_f in
+ -) ac_f="$ac_tmp/stdin";;
+ *) # Look for the file first in the build tree, then in the source tree
+ # (if the path is not absolute). The absolute path cannot be DOS-style,
+ # because $ac_f cannot contain `:'.
+ test -f "$ac_f" ||
+ case $ac_f in
+ [\\/$]*) false;;
+ *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
+ esac ||
+ as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
+ esac
+ case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
+ as_fn_append ac_file_inputs " '$ac_f'"
+ done
+
+ # Let's still pretend it is `configure' which instantiates (i.e., don't
+ # use $as_me), people would be surprised to read:
+ # /* config.h. Generated by config.status. */
+ configure_input='Generated from '`
+ $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
+ `' by configure.'
+ if test x"$ac_file" != x-; then
+ configure_input="$ac_file. $configure_input"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
+$as_echo "$as_me: creating $ac_file" >&6;}
+ fi
+ # Neutralize special characters interpreted by sed in replacement strings.
+ case $configure_input in #(
+ *\&* | *\|* | *\\* )
+ ac_sed_conf_input=`$as_echo "$configure_input" |
+ sed 's/[\\\\&|]/\\\\&/g'`;; #(
+ *) ac_sed_conf_input=$configure_input;;
+ esac
+
+ case $ac_tag in
+ *:-:* | *:-) cat >"$ac_tmp/stdin" \
+ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
+ esac
+ ;;
+ esac
+
+ ac_dir=`$as_dirname -- "$ac_file" ||
+$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+ X"$ac_file" : 'X\(//\)[^/]' \| \
+ X"$ac_file" : 'X\(//\)$' \| \
+ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$ac_file" |
+ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)[^/].*/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+ as_dir="$ac_dir"; as_fn_mkdir_p
+ ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+ ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+ # A ".." for each directory in $ac_dir_suffix.
+ ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+ case $ac_top_builddir_sub in
+ "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+ *) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+ esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+ .) # We are building in place.
+ ac_srcdir=.
+ ac_top_srcdir=$ac_top_builddir_sub
+ ac_abs_top_srcdir=$ac_pwd ;;
+ [\\/]* | ?:[\\/]* ) # Absolute name.
+ ac_srcdir=$srcdir$ac_dir_suffix;
+ ac_top_srcdir=$srcdir
+ ac_abs_top_srcdir=$srcdir ;;
+ *) # Relative name.
+ ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+ ac_top_srcdir=$ac_top_build_prefix$srcdir
+ ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+
+ case $ac_mode in
+ :F)
+ #
+ # CONFIG_FILE
+ #
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# If the template does not know about datarootdir, expand it.
+# FIXME: This hack should be removed a few years after 2.60.
+ac_datarootdir_hack=; ac_datarootdir_seen=
+ac_sed_dataroot='
+/datarootdir/ {
+ p
+ q
+}
+/@datadir@/p
+/@docdir@/p
+/@infodir@/p
+/@localedir@/p
+/@mandir@/p'
+case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
+*datarootdir*) ac_datarootdir_seen=yes;;
+*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
+$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ ac_datarootdir_hack='
+ s&@datadir@&$datadir&g
+ s&@docdir@&$docdir&g
+ s&@infodir@&$infodir&g
+ s&@localedir@&$localedir&g
+ s&@mandir@&$mandir&g
+ s&\\\${datarootdir}&$datarootdir&g' ;;
+esac
+_ACEOF
+
+# Neutralize VPATH when `$srcdir' = `.'.
+# Shell code in configure.ac might set extrasub.
+# FIXME: do we really want to maintain this feature?
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_sed_extra="$ac_vpsub
+$extrasub
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+:t
+/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
+s|@configure_input@|$ac_sed_conf_input|;t t
+s&@top_builddir@&$ac_top_builddir_sub&;t t
+s&@top_build_prefix@&$ac_top_build_prefix&;t t
+s&@srcdir@&$ac_srcdir&;t t
+s&@abs_srcdir@&$ac_abs_srcdir&;t t
+s&@top_srcdir@&$ac_top_srcdir&;t t
+s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
+s&@builddir@&$ac_builddir&;t t
+s&@abs_builddir@&$ac_abs_builddir&;t t
+s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
+$ac_datarootdir_hack
+"
+eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
+ >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+
+test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
+ { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
+ { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \
+ "$ac_tmp/out"`; test -z "$ac_out"; } &&
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined. Please make sure it is defined" >&5
+$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined. Please make sure it is defined" >&2;}
+
+ rm -f "$ac_tmp/stdin"
+ case $ac_file in
+ -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
+ *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
+ esac \
+ || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+ ;;
+ :H)
+ #
+ # CONFIG_HEADER
+ #
+ if test x"$ac_file" != x-; then
+ {
+ $as_echo "/* $configure_input */" \
+ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
+ } >"$ac_tmp/config.h" \
+ || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+ if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
+$as_echo "$as_me: $ac_file is unchanged" >&6;}
+ else
+ rm -f "$ac_file"
+ mv "$ac_tmp/config.h" "$ac_file" \
+ || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+ fi
+ else
+ $as_echo "/* $configure_input */" \
+ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
+ || as_fn_error $? "could not create -" "$LINENO" 5
+ fi
+ ;;
+
+
+ esac
+
+done # for ac_tag
+
+
+as_fn_exit 0
+_ACEOF
+ac_clean_files=$ac_clean_files_save
+
+test $ac_write_fail = 0 ||
+ as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
+
+
+# configure is writing to config.log, and then calls config.status.
+# config.status does its own redirection, appending to config.log.
+# Unfortunately, on DOS this fails, as config.log is still kept open
+# by configure, so config.status won't be able to write to it; its
+# output is simply discarded. So we exec the FD to /dev/null,
+# effectively closing config.log, so it can be properly (re)opened and
+# appended to by config.status. When coming back to configure, we
+# need to make the FD available again.
+if test "$no_create" != yes; then
+ ac_cs_success=:
+ ac_config_status_args=
+ test "$silent" = yes &&
+ ac_config_status_args="$ac_config_status_args --quiet"
+ exec 5>/dev/null
+ $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
+ exec 5>>config.log
+ # Use ||, not &&, to avoid exiting from the if with $? = 1, which
+ # would make configure fail if this is the last instruction.
+ $ac_cs_success || as_fn_exit 1
+fi
+if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
+$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
+fi
+
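The config.status script emitted above is driven through the options shown in its usage text; a minimal sketch of typical invocations from the libcody build directory (file targets other than config.h depend on what configure.ac registers) would be:

    ./config.status config.h            # regenerate only the configuration header
    ./config.status --header=config.h   # the same, via the explicit option form
    ./config.status --recheck           # re-run configure with the recorded arguments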
diff --git a/libcody/configure.ac b/libcody/configure.ac
new file mode 100644
index 0000000..31f041e
--- /dev/null
+++ b/libcody/configure.ac
@@ -0,0 +1,37 @@
+# CODYlib -*- mode:autoconf -*-
+# Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+AC_INIT([codylib],[0.0],[github.com/urnathan/libcody])
+AC_CONFIG_SRCDIR(cody.hh)
+m4_include(config.m4)
+
+AC_CONFIG_AUX_DIR(build-aux)
+AC_SUBST(PACKAGE_VERSION)
+
+NMS_NOT_IN_SOURCE
+AC_CANONICAL_HOST
+
+NMS_TOOLS
+NMS_NUM_CPUS
+NMS_MAINTAINER_MODE
+NMS_CXX_COMPILER
+AC_LANG(C++)
+AC_PROG_CXX
+NMS_CXX_11
+NMS_TOOL_DIRS
+NMS_LINK_OPT([-Wl,--no-undefined])
+NMS_CONFIG_FILES([gdbinit dox.cfg])
+
+NMS_BUGURL
+NMS_ENABLE_CHECKING
+NMS_ENABLE_EXCEPTIONS
+
+AC_CONFIG_HEADERS([config.h])
+AC_CHECK_TOOL([AR],[ar])
+AC_CHECK_PROG([DOXYGEN],[doxygen],[doxygen],[: NOTdoxygen])
+AC_CHECK_PROG([ALOY],[aloy],[aloy],[: Joust testsuite missing])
+AH_VERBATIM([_GNU_SOURCE],[#define _GNU_SOURCE 1])
+AH_VERBATIM([_FORTIFY_SOURCE],[#undef _FORTIFY_SOURCE])
+
+AC_OUTPUT
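As orientation, this configure.ac sets up a conventional out-of-source Autoconf build. The following is only a hedged sketch of configuring and building libcody standalone; the NMS_* macros are project-local, so treating NMS_NOT_IN_SOURCE as rejecting in-source builds and assuming a "make check" target are assumptions:

    SRCDIR=/path/to/libcody        # hypothetical: wherever these sources were checked out
    mkdir build && cd build        # assumed: NMS_NOT_IN_SOURCE wants a separate build dir
    $SRCDIR/configure CXX=g++      # AC_PROG_CXX / NMS_CXX_COMPILER select the C++ compiler
    make                           # build the library
    make check                     # assumed target; only useful if aloy/joust was detected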
diff --git a/libcody/dox.cfg.in b/libcody/dox.cfg.in
new file mode 100644
index 0000000..1a39b2a
--- /dev/null
+++ b/libcody/dox.cfg.in
@@ -0,0 +1,2478 @@
+# Doxyfile 1.8.15
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the configuration
+# file that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# https://www.gnu.org/software/libiconv/ for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME = "libcody"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER = @PACKAGE_VERSION@
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give the viewer
+# a quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF = "A Compiler/Build System Interface"
+
+# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
+# in the documentation. The maximum height of the logo should not exceed 55
+# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
+# the logo to the output directory.
+
+PROJECT_LOGO =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY = dox
+
+# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge number of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS = NO
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE = English
+
+# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all generated output in the proper direction.
+# Possible values are: None, LTR, RTL and Context.
+# The default value is: None.
+
+OUTPUT_TEXT_DIRECTION = None
+
+# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity):The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF = "The $name class" \
+ "The $name widget" \
+ "The $name file" \
+ is \
+ provides \
+ specifies \
+ contains \
+ represents \
+ a \
+ an \
+ the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
+# before file names in the file list and in the header files. If set to NO, the
+# shortest path that makes the file name unique will be used.
+# The default value is: YES.
+
+FULL_PATH_NAMES = NO
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names, as on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that Rational Rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = YES
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
+# page for each member. If set to NO, the documentation of a member will be part
+# of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE = 8
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines (in the resulting output). You can put ^^ in the value part of an
+# alias to insert a newline as if a physical newline was in the original file.
+# When you need a literal { or } or , in the value part of an alias you have to
+# escape them by means of a backslash (\); this can lead to conflicts with the
+# commands \{ and \}. For these it is advised to use the versions @{ and @}, or
+# a double escape (\\{ and \\}).
+
+ALIASES =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
+# sources only. Doxygen will then generate output that is more tailored for that
+# language. For instance, namespaces will be presented as modules, types will be
+# separated into more groups, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_SLICE = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
+# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
+# FortranFree, unknown formatted Fortran: Fortran. In the latter case the parser
+# tries to guess whether the code is fixed or free formatted code; this is the
+# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
+# .inc files as Fortran files (default is PHP), and .f files as C (default is
+# Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING = no_extension=C++ \
+ hh=C++
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See https://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT = YES
+
+# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
+# to that level are automatically included in the table of contents, even if
+# they do not have an id attribute.
+# Note: This feature currently applies only to Markdown headings.
+# Minimum value: 0, maximum value: 99, default value: 0.
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
+
+TOC_INCLUDE_HEADINGS = 0
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word or
+# globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT = YES
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen replace the get and set methods with a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# If one adds a struct or class to a group and this option is enabled, then also
+# any nested class or struct is added to the same group. By default this option
+# is disabled and one has to add nested compounds explicitly via \ingroup.
+# The default value is: NO.
+
+GROUP_NESTED_COMPOUNDS = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled, the typedef will appear as a member of a file,
+# namespace, or class, and the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE = NO
+
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface, are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous namespace
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO, these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES, upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
+# list. This list is created by putting \todo commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
+# list. This list is created by putting \test commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES, the
+# list will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command command input-file, where command is the value of the
+# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
+# by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option; if omitted, DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info on how to create references.
+
+CITE_BIB_FILES =
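+# For example, with a (hypothetical) refs.bib next to the sources,
+#   CITE_BIB_FILES = refs.bib
+# would let the documentation reference its entries with \cite{some-key}, where
+# refs.bib and some-key are placeholders.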
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong or incomplete
+# parameter documentation, but not about the absence of documentation. If
+# EXTRACT_ALL is set to YES then this flag will automatically be disabled.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC = NO
+
+# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
+# a warning is encountered.
+# The default value is: NO.
+
+WARN_AS_ERROR = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT = "$file:$line: $text"
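+# For example, a tool that expects file(line) style locations could be served
+# with something like
+#   WARN_FORMAT = "$file($line): $text"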
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+INPUT = @srcdir@
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# read by doxygen.
+#
+# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
+# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
+# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
+# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
+# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
+
+FILE_PATTERNS = *.cc *.hh
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE = NO
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
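+# For example, pointing this at a (hypothetical) examples subdirectory,
+#   EXAMPLE_PATH = examples
+# would let \include hello.cc pull examples/hello.cc into the documentation,
+# where hello.cc is a placeholder name.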
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
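+# For example, a (hypothetical) preprocessing script could be hooked in as
+#   INPUT_FILTER = "perl strip-internal.pl"
+# doxygen would then append each input file name and parse whatever the script
+# writes to standard output instead of the file itself.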
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on, for instance,
+# GitHub and want to reuse the introduction page for the doxygen output as well.
+
+USE_MDFILE_AS_MAINPAGE = README.md
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# entity all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see https://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX =
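+# For example, if most classes shared a (hypothetical) prefix Xy, then
+#   IGNORE_PREFIX = Xy
+# would list a class XyServer under "S" rather than under "X".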
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which depend on the configuration options used (e.g. the
+# setting GENERATE_TREEVIEW). It is highly recommended to start with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER =
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list). For an example see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES =
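+# For example, an extra image listed here could be referenced from a custom
+# HTML_HEADER with something like
+#   <img src="$relpath^extra-logo.png" alt="logo"/>
+# where extra-logo.png is a placeholder name.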
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the style sheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# https://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to YES can help to show when doxygen was last run and thus if the
+# documentation is up to date.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP = NO
+
+# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
+# documentation will contain a main index with vertical navigation menus that
+# are dynamically created via Javascript. If disabled, the navigation index will
+# consist of multiple levels of tabs that are statically embedded in every HTML
+# page. Disable this option to support browsers that do not have Javascript,
+# like the Qt help browser.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_MENUS = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: https://developer.apple.com/xcode/), introduced with OSX
+# 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
+# genXcode/_index.html for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION =
+
+# The GENERATE_CHI flag controls whether a separate .chi index file is generated
+# (YES) or whether it is included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated that, together with the HTML files, form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files need
+# to be copied into the plugins directory of Eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying, Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# https://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output.
+# When enabled you may also need to install MathJax separately and configure the
+# path to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from https://www.mathjax.org before deployment.
+# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH = https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow; in that case
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: https://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: https://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL =
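+# For example, with doxysearch.cgi deployed on a (hypothetical) server:
+#   SEARCHENGINE_URL = https://docs.example.org/cgi-bin/doxysearch.cgi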
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of
+# each project to a relative location where the documentation can be found. The
+# format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX = YES
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when USE_PDFLATEX is not enabled the default is latex; when
+# USE_PDFLATEX is enabled the default is pdflatex, and if latex is chosen in the
+# latter case it is overridden by pdflatex. For specific output languages the
+# default may have been set differently; this depends on the implementation of
+# the output language.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME =
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# Note: This tag is used in the Makefile / make.bat.
+# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file
+# (.tex).
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to
+# generate index for LaTeX.
+# Note: This tag is used in the generated output file (.tex).
+# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat.
+# The default value is: \makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_MAKEINDEX_CMD = \makeindex
+
+# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE = a4
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. The package can be specified just
+# by its name or with the correct syntax as to be used with the LaTeX
+# \usepackage command. To get the times font for instance you can specify :
+# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
+# To use the option intlimits with the amsmath package you can specify:
+# EXTRA_PACKAGES=[intlimits]{amsmath}
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
+# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
+# string, for the replacement values of the other commands the user is referred
+# to HTML_HEADER.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER =
+
+# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# LaTeX style sheets that are included after the standard style sheets created
+# by doxygen. Using this option one can overrule certain style aspects. Doxygen
+# will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_STYLESHEET =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS = YES
+
+# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES to get
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# https://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE = plain
+
+# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_TIMESTAMP = NO
+
+# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute)
+# path from which the emoji images will be read. If a relative path is entered,
+# it will be relative to the LATEX_OUTPUT directory. If left blank the
+# LATEX_OUTPUT directory will be used.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EMOJI_DIRECTORY =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's
+# configuration file, i.e. a series of assignments. You only have to provide
+# replacements; missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's configuration file. A template extensions file can be
+# generated using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE =
+
+# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
+# with syntax highlighting in the RTF output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_SOURCE_CODE = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION = .3
+
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR =
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT = xml
+
+# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING = YES
+
+# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include
+# namespace members in file scope as well, matching the HTML output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_NS_MEMB_FILE_SCOPE = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT = docbook
+
+# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
+# program listings (including syntax highlighting and cross-referencing
+# information) to the DOCBOOK output. Note that enabling this will significantly
+# increase the size of the DOCBOOK output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_PROGRAMLISTING = NO
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
+# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures
+# the structure of the code including all documentation. Note that this feature
+# is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO, the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
+# in the source code. If set to NO, only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES, the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES, all external classes will be listed in
+# the class index. If set to NO, only the inherited external classes will be
+# listed.
+# The default value is: NO.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS = YES
+
+# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS = YES
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH =
+
+# If set to YES the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO
+# The default value is: NO.
+
+HAVE_DOT = NO
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS = 0
+
+# When you want a differently looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command. Disabling a call graph can be
+# accomplished by means of the command \hidecallgraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH = NO
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command. Disabling a caller graph can be
+# accomplished by means of the command \hidecallergraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH = NO
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. For an explanation of the image formats see the section
+# output formats in the documentation of the dot tool (Graphviz (see:
+# http://www.graphviz.org/)).
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
+# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
+# png:gdiplus:gdiplus.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS =
+
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file. If left blank, it is assumed
+# PlantUML is not used or called during a preprocessing step. Doxygen will
+# generate a warning when it encounters a \startuml command in this case and
+# will not generate output for the diagram.
+
+PLANTUML_JAR_PATH =
+
+# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
+# configuration file for plantuml.
+
+PLANTUML_CFG_FILE =
+
+# When using plantuml, the specified paths are searched for files specified by
+# the !include statement in a plantuml block.
+
+PLANTUML_INCLUDE_PATH =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lie
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS = NO
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP = YES
diff --git a/libcody/fatal.cc b/libcody/fatal.cc
new file mode 100644
index 0000000..b35094e
--- /dev/null
+++ b/libcody/fatal.cc
@@ -0,0 +1,78 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2019-2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+// C
+#include <csignal>
+#include <cstdint>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+
+namespace Cody {
+
+#if NMS_CHECKING
+void (AssertFailed) (Location loc) noexcept
+{
+ (HCF) ("assertion failed", loc);
+}
+void (Unreachable) (Location loc) noexcept
+{
+ (HCF) ("unreachable reached", loc);
+}
+#endif
+
+void (HCF) (char const *msg
+#if NMS_CHECKING
+ , Location const loc
+#endif
+ ) noexcept
+{ // HCF - you goofed!
+ __asm__ volatile ("nop"); // HCF - you goofed!
+
+#if !NMS_CHECKING
+ constexpr Location loc (nullptr, 0);
+#endif
+
+ fprintf (stderr, "CODYlib: %s", msg ? msg : "internal error");
+ if (char const *file = loc.File ())
+ {
+ char const *src = SRCDIR;
+
+ if (src[0])
+ {
+ size_t l = strlen (src);
+
+ if (!strncmp (src, file, l) && file[l] == '/')
+ file += l + 1;
+ }
+ fprintf (stderr, " at %s:%u", file, loc.Line ());
+ }
+ fprintf (stderr, "\n");
+ raise (SIGABRT);
+ exit (2);
+}
+
+void BuildNote (FILE *stream) noexcept
+{
+ fprintf (stream, "Version %s.\n", PACKAGE_NAME " " PACKAGE_VERSION);
+ fprintf (stream, "Report bugs to %s.\n", BUGURL[0] ? BUGURL : "you");
+ if (PACKAGE_URL[0])
+ fprintf (stream, "See %s for more information.\n", PACKAGE_URL);
+ if (REVISION[0])
+ fprintf (stream, "Source %s.\n", REVISION);
+
+ fprintf (stream, "Build is %s & %s.\n",
+#if !NMS_CHECKING
+ "un"
+#endif
+ "checked",
+#if !__OPTIMIZE__
+ "un"
+#endif
+ "optimized");
+}
+
+}
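
fatal.cc supplies the library's last-resort error path: HCF prints the message (plus file:line when NMS_CHECKING is on, trimming the SRCDIR prefix) and raises SIGABRT, while BuildNote reports the configured package name, version, bug URL and whether the build is checked and optimized. A minimal in-tree sketch of surfacing that note from a diagnostic handler; ReportInternalError is a hypothetical helper, not part of this commit.

  // Sketch (in-tree): dump libcody build details when something goes wrong.
  #include "internal.hh"
  #include <cstdio>

  static void ReportInternalError (char const *what)
  {
    std::fprintf (stderr, "unexpected condition: %s\n", what);
    Cody::BuildNote (stderr);  // version, bug URL, checked/optimized status
  }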
diff --git a/libcody/gdbinit.in b/libcody/gdbinit.in
new file mode 100644
index 0000000..b81b7d8
--- /dev/null
+++ b/libcody/gdbinit.in
@@ -0,0 +1,11 @@
+# CODYlib -*- mode:autoconf -*-
+# Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+dir @srcdir@
+# some default breakpoints, to catch the fatal exit
+set breakpoint pending on
+break HCF
+set complaints 0
+break exit
+set breakpoint pending off
diff --git a/libcody/internal.hh b/libcody/internal.hh
new file mode 100644
index 0000000..d744b56
--- /dev/null
+++ b/libcody/internal.hh
@@ -0,0 +1,125 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+#include "cody.hh"
+
+// C++
+#if __GNUC__ >= 10
+#define CODY_LOC_BUILTIN 1
+#elif !defined (__has_include)
+#elif __has_include (<source_location>)
+#include <source_location>
+#define CODY_LOC_SOURCE 1
+#endif
+// C
+#include <cstdio>
+
+namespace Cody {
+
+// Location is needed regardless of checking, to make the fatal
+// handler simpler
+class Location
+{
+protected:
+ char const *file;
+ unsigned line;
+
+public:
+ constexpr Location (char const *file_
+#if CODY_LOC_BUILTIN
+ = __builtin_FILE ()
+#elif !CODY_LOC_SOURCE
+ = nullptr
+#endif
+ , unsigned line_
+#if CODY_LOC_BUILTIN
+ = __builtin_LINE ()
+#elif !CODY_LOC_SOURCE
+ = 0
+#endif
+ )
+ :file (file_), line (line_)
+ {
+ }
+
+#if !CODY_LOC_BUILTIN && CODY_LOC_SOURCE
+  constexpr Location (std::source_location loc
+		      = std::source_location::current ())
+    : Location (loc.file_name (), loc.line ())
+ {
+ }
+#endif
+
+public:
+ constexpr char const *File () const
+ {
+ return file;
+ }
+ constexpr unsigned Line () const
+ {
+ return line;
+ }
+};
+
+void HCF [[noreturn]]
+(
+ char const *msg
+#if NMS_CHECKING
+ , Location const = Location ()
+#if !CODY_LOC_BUILTIN && !CODY_LOC_SOURCE
+#define HCF(M) HCF ((M), Cody::Location (__FILE__, __LINE__))
+#endif
+#endif
+ ) noexcept;
+
+#if NMS_CHECKING
+void AssertFailed [[noreturn]] (Location loc = Location ()) noexcept;
+void Unreachable [[noreturn]] (Location loc = Location ()) noexcept;
+#if !CODY_LOC_BUILTIN && !CODY_LOC_SOURCE
+#define AssertFailed() AssertFailed (Cody::Location (__FILE__, __LINE__))
+#define Unreachable() Unreachable (Cody::Location (__FILE__, __LINE__))
+#endif
+
+// Do we have __VA_OPT__, alas no specific feature macro for it :(
+// From stack overflow
+// https://stackoverflow.com/questions/48045470/portably-detect-va-opt-support
+// Relies on having variadic macros, but they're a C++11 thing, so
+// we're good
+#define HAVE_ARG_3(a,b,c,...) c
+#define HAVE_VA_OPT_(...) HAVE_ARG_3(__VA_OPT__(,),true,false,)
+#define HAVE_VA_OPT HAVE_VA_OPT_(?)
+
+// Oh, for lazily evaluated function parameters
+#if HAVE_VA_OPT
+// Assert is variadic, so you can write Assert (TPL<A,B>(C)) without
+// extraneous parens. I don't think we need that though.
+#define Assert(EXPR, ...) \
+ (__builtin_expect (bool (EXPR __VA_OPT__ (, __VA_ARGS__)), true) \
+ ? (void)0 : AssertFailed ())
+#else
+// If you don't have the GNU ,##__VA_ARGS__ pasting extension, we'll
+// need another fallback
+#define Assert(EXPR, ...) \
+ (__builtin_expect (bool (EXPR, ##__VA_ARGS__), true) \
+ ? (void)0 : AssertFailed ())
+#endif
+#else
+// Not asserting, use EXPR in an unevaluated context
+#if HAVE_VA_OPT
+#define Assert(EXPR, ...) \
+ ((void)sizeof (bool (EXPR __VA_OPT__ (, __VA_ARGS__))), (void)0)
+#else
+#define Assert(EXPR, ...) \
+ ((void)sizeof (bool (EXPR, ##__VA_ARGS__)), (void)0)
+#endif
+
+inline void Unreachable () noexcept
+{
+ __builtin_unreachable ();
+}
+#endif
+
+// FIXME: This should be user visible in some way
+void BuildNote (FILE *stream) noexcept;
+
+}
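
internal.hh picks one of three ways to capture a caller's location (the GCC 10 __builtin_FILE/__builtin_LINE builtins, std::source_location, or an explicit __FILE__/__LINE__ macro fallback) and defines Assert so the condition is tested only in NMS_CHECKING builds and merely type-checked otherwise. A small usage sketch, placed inside namespace Cody as the library's own callers are; ValidateCount is a made-up function for illustration, not part of the commit.

  // Sketch of in-tree usage of the checking machinery.
  #include "internal.hh"

  namespace Cody {

  void ValidateCount (int count)
  {
    // Checking builds: a failure reports file:line via Location and lands
    // in HCF (SIGABRT).  Release builds: the expression is not evaluated.
    Assert (count >= 0);
    if (count < 0)
      Unreachable ();
  }

  }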
diff --git a/libcody/netclient.cc b/libcody/netclient.cc
new file mode 100644
index 0000000..8cccfac
--- /dev/null
+++ b/libcody/netclient.cc
@@ -0,0 +1,140 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+
+#if CODY_NETWORKING
+// C
+#include <cerrno>
+#include <cstring>
+// OS
+#include <netdb.h>
+#include <unistd.h>
+#include <arpa/inet.h>
+#include <sys/un.h>
+
+#ifndef AI_NUMERICSERV
+#define AI_NUMERICSERV 0
+#endif
+
+// Client-side networking helpers
+
+namespace Cody {
+
+int OpenSocket (char const **e, sockaddr const *addr, socklen_t len)
+{
+ char const *errstr = nullptr;
+
+ int fd = socket (addr->sa_family, SOCK_STREAM, 0);
+ if (fd < 0)
+ {
+ errstr = "creating socket";
+
+ fail:;
+ int err = errno;
+ if (e)
+ *e = errstr;
+ if (fd >= 0)
+ close (fd);
+ errno = err;
+ return -1;
+ }
+
+ if (connect (fd, addr, len) < 0)
+ {
+ errstr = "connecting socket";
+ goto fail;
+ }
+
+ return fd;
+}
+
+int OpenLocal (char const **e, char const *name)
+{
+ sockaddr_un addr;
+ size_t len = strlen (name);
+
+ if (len >= sizeof (addr.sun_path))
+ {
+ errno = ENAMETOOLONG;
+ return -1;
+ }
+
+ memset (&addr, 0, offsetof (sockaddr_un, sun_path));
+ addr.sun_family = AF_UNIX;
+ memcpy (addr.sun_path, name, len + 1);
+ return OpenSocket (e, (sockaddr *)&addr, sizeof (addr));
+}
+
+int OpenInet6 (char const **e, char const *name, int port)
+{
+ addrinfo *addrs = nullptr;
+ int fd = -1;
+ char const *errstr = nullptr;
+
+ fd = socket (AF_INET6, SOCK_STREAM, 0);
+ if (fd < 0)
+ {
+ errstr = "socket";
+
+ fail:;
+ int err = errno;
+ if (e)
+ *e = errstr;
+ if (fd >= 0)
+ close (fd);
+ if (addrs)
+ freeaddrinfo (addrs);
+ errno = err;
+ return -1;
+ }
+
+ addrinfo hints;
+ hints.ai_flags = AI_NUMERICSERV;
+ hints.ai_family = AF_INET6;
+ hints.ai_socktype = SOCK_STREAM;
+ hints.ai_protocol = 0;
+ hints.ai_addrlen = 0;
+ hints.ai_addr = nullptr;
+ hints.ai_canonname = nullptr;
+ hints.ai_next = nullptr;
+
+ /* getaddrinfo requires a port number, but is quite happy to accept
+ invalid ones. So don't rely on it. */
+ if (int err = getaddrinfo (name, "0", &hints, &addrs))
+ {
+ errstr = gai_strerror (err);
+ // What's the best errno to set?
+ errno = 0;
+ goto fail;
+ }
+
+ sockaddr_in6 addr;
+ memset (&addr, 0, sizeof (addr));
+ addr.sin6_family = AF_INET6;
+
+ for (struct addrinfo *next = addrs; next; next = next->ai_next)
+ if (next->ai_family == AF_INET6
+ && next->ai_socktype == SOCK_STREAM)
+ {
+ sockaddr_in6 *in6 = (sockaddr_in6 *)next->ai_addr;
+ in6->sin6_port = htons (port);
+ if (ntohs (in6->sin6_port) != port)
+ errno = EINVAL;
+ else if (!connect (fd, next->ai_addr, next->ai_addrlen))
+ goto done;
+ }
+ errstr = "connecting";
+ goto fail;
+
+ done:;
+ freeaddrinfo (addrs);
+
+ return fd;
+}
+
+}
+
+#endif
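
netclient.cc gives a compiler front end two ways to reach a resolver: OpenLocal for an AF_UNIX socket path and OpenInet6 for a host/port, each returning a connected descriptor or -1 with a short description in *e and the failing errno preserved. A hedged sketch of driving them directly, assuming their declarations in cody.hh (they are only compiled when CODY_NETWORKING is set); the socket path and port are invented for the example, and in practice the descriptor ends up in a Cody::Client as the tests later in this commit do with fds 0 and 1.

  // Sketch: try a local socket first, then IPv6.  Names are illustrative.
  #include "cody.hh"
  #include <cstdio>

  int ConnectToResolver ()
  {
    char const *errmsg = nullptr;
    int fd = Cody::OpenLocal (&errmsg, "/tmp/module-resolver.sock");
    if (fd < 0)
      fd = Cody::OpenInet6 (&errmsg, "localhost", 4096);
    if (fd < 0 && errmsg)
      std::fprintf (stderr, "cannot reach resolver: %s\n", errmsg);
    return fd;  // wrap in Cody::Client (fd, fd), or report failure
  }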
diff --git a/libcody/netserver.cc b/libcody/netserver.cc
new file mode 100644
index 0000000..7e43eb0
--- /dev/null
+++ b/libcody/netserver.cc
@@ -0,0 +1,153 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+#if CODY_NETWORKING
+// C
+#include <cerrno>
+#include <cstring>
+// OS
+#include <netdb.h>
+#include <unistd.h>
+#include <arpa/inet.h>
+#include <sys/un.h>
+
+#ifndef AI_NUMERICSERV
+#define AI_NUMERICSERV 0
+#endif
+
+// Server-side networking helpers
+
+namespace Cody {
+
+int ListenSocket (char const **e, sockaddr const *addr, socklen_t len,
+ unsigned backlog)
+{
+ char const *errstr = nullptr;
+
+ int fd = socket (addr->sa_family, SOCK_STREAM, 0);
+ if (fd < 0)
+ {
+ errstr = "creating socket";
+
+ fail:;
+ int err = errno;
+ if (e)
+ *e = errstr;
+ if (fd >= 0)
+ close (fd);
+ errno = err;
+ return -1;
+ }
+
+ if (bind (fd, addr, len) < 0)
+ {
+ errstr = "binding socket";
+ goto fail;
+ }
+
+ if (listen (fd, backlog ? backlog : 17) < 0)
+ {
+ errstr = "listening socket";
+ goto fail;
+ }
+
+ return fd;
+}
+
+int ListenLocal (char const **e, char const *name, unsigned backlog)
+{
+ sockaddr_un addr;
+ size_t len = strlen (name);
+
+ if (len >= sizeof (addr.sun_path))
+ {
+ errno = ENAMETOOLONG;
+ return -1;
+ }
+
+ memset (&addr, 0, offsetof (sockaddr_un, sun_path));
+ addr.sun_family = AF_UNIX;
+ memcpy (addr.sun_path, name, len + 1);
+
+ return ListenSocket (e, (sockaddr *)&addr, sizeof (addr), backlog);
+}
+
+int ListenInet6 (char const **e, char const *name, int port, unsigned backlog)
+{
+ addrinfo *addrs = nullptr;
+ int fd = -1;
+ char const *errstr = nullptr;
+
+ fd = socket (AF_INET6, SOCK_STREAM, 0);
+ if (fd < 0)
+ {
+ errstr = "creating socket";
+
+ fail:;
+ int err = errno;
+ if (e)
+ *e = errstr;
+ if (fd >= 0)
+ close (fd);
+ if (addrs)
+ freeaddrinfo (addrs);
+ errno = err;
+ return -1;
+ }
+
+ addrinfo hints;
+ hints.ai_flags = AI_NUMERICSERV;
+ hints.ai_family = AF_INET6;
+ hints.ai_socktype = SOCK_STREAM;
+ hints.ai_protocol = 0;
+ hints.ai_addrlen = 0;
+ hints.ai_addr = nullptr;
+ hints.ai_canonname = nullptr;
+ hints.ai_next = nullptr;
+
+ /* getaddrinfo requires a port number, but is quite happy to accept
+ invalid ones. So don't rely on it. */
+ if (int err = getaddrinfo (name, "0", &hints, &addrs))
+ {
+ errstr = gai_strerror (err);
+ // What's the best errno to set?
+ errno = 0;
+ goto fail;
+ }
+
+ sockaddr_in6 addr;
+ memset (&addr, 0, sizeof (addr));
+ addr.sin6_family = AF_INET6;
+
+ for (struct addrinfo *next = addrs; next; next = next->ai_next)
+ if (next->ai_family == AF_INET6
+ && next->ai_socktype == SOCK_STREAM)
+ {
+ sockaddr_in6 *in6 = (sockaddr_in6 *)next->ai_addr;
+ in6->sin6_port = htons (port);
+ if (ntohs (in6->sin6_port) != port)
+ errno = EINVAL;
+ else if (!bind (fd, next->ai_addr, next->ai_addrlen))
+ goto listen;
+ }
+
+ errstr = "binding socket";
+ goto fail;
+
+ listen:;
+ freeaddrinfo (addrs);
+
+ if (listen (fd, backlog ? backlog : 17) < 0)
+ {
+ errstr = "listening socket";
+ goto fail;
+ }
+
+ return fd;
+}
+
+}
+#endif
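
netserver.cc is the listening-side mirror: ListenLocal and ListenInet6 bind and listen (backlog 17 when 0 is passed), leaving accept to the caller. Combined with the Server methods exercised by the tests below, a resolver process reduces to roughly the following sketch; the socket path, the one-exchange-per-connection loop and the helper name are simplifications for illustration, not the commit's API.

  // Sketch: accept connections and service one batch of requests each.
  #include "cody.hh"
  #include <sys/socket.h>
  #include <unistd.h>
  #include <cerrno>
  #include <cstdio>

  int ServeLocal (Cody::Resolver *resolver)
  {
    char const *errmsg = nullptr;
    int sock = Cody::ListenLocal (&errmsg, "/tmp/module-resolver.sock", 0);
    if (sock < 0)
      {
        std::fprintf (stderr, "cannot listen: %s\n", errmsg ? errmsg : "");
        return 1;
      }
    for (;;)
      {
        int fd = accept (sock, nullptr, nullptr);
        if (fd < 0)
          continue;
        Cody::Server server (resolver, fd, fd);  // read and write on one fd
        while (int e = server.Read ())
          if (e != EAGAIN && e != EINTR)
            break;
        server.ProcessRequests ();
        server.PrepareToWrite ();
        while (int e = server.Write ())
          if (e != EAGAIN && e != EINTR)
            break;
        close (fd);
      }
  }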
diff --git a/libcody/packet.cc b/libcody/packet.cc
new file mode 100644
index 0000000..65dc8f4
--- /dev/null
+++ b/libcody/packet.cc
@@ -0,0 +1,50 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+
+namespace Cody {
+
+void Packet::Destroy ()
+{
+ switch (cat)
+ {
+ case STRING:
+ // Silly scope destructor name rules
+ using S = std::string;
+ string.~S ();
+ break;
+
+ case VECTOR:
+ using V = std::vector<std::string>;
+ vector.~V ();
+ break;
+
+ default:;
+ }
+}
+
+void Packet::Create (Packet &&t)
+{
+ cat = t.cat;
+ code = t.code;
+ request = t.request;
+ switch (cat)
+ {
+ case STRING:
+ new (&string) std::string (std::move (t.string));
+ break;
+
+ case VECTOR:
+ new (&vector) std::vector<std::string> (std::move (t.vector));
+ break;
+
+ default:
+ integer = t.integer;
+ break;
+ }
+}
+
+}
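
packet.cc is the manual lifetime management behind Packet's union payload: Destroy names the active member's destructor through a local alias (the "silly scope destructor name rules" noted above), and Create placement-news the member moved from the source packet. The same idiom in a reduced, self-contained form, independent of the libcody types:

  // Sketch of the tagged-union idiom Packet uses: explicit destructor call
  // plus placement new.  Standalone example, not libcody API.
  #include <new>
  #include <string>
  #include <utility>

  struct Value
  {
    enum Kind { INT, STR } kind;
    union { int i; std::string s; };

    Value (int v) : kind (INT), i (v) {}
    Value (std::string v) : kind (STR) { new (&s) std::string (std::move (v)); }
    Value (Value &&o) : kind (o.kind)
    {
      if (kind == STR)
        new (&s) std::string (std::move (o.s));
      else
        i = o.i;
    }
    ~Value ()
    {
      if (kind == STR)
        {
          using S = std::string;  // scoped alias, as packet.cc does
          s.~S ();
        }
    }
  };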
diff --git a/libcody/resolver.cc b/libcody/resolver.cc
new file mode 100644
index 0000000..b83880f
--- /dev/null
+++ b/libcody/resolver.cc
@@ -0,0 +1,209 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+// OS
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#if (defined (__unix__) \
+     || (defined (__APPLE__) \
+ && defined (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) \
+ && __ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ >= 101000))
+// Autoconf test?
+#define HAVE_FSTATAT 1
+#else
+#define HAVE_FSTATAT 0
+#endif
+
+// Resolver code
+
+#if __windows__
+inline bool IsDirSep (char c)
+{
+ return c == '/' || c == '\\';
+}
+inline bool IsAbsPath (char const *str)
+{
+ // IIRC windows has the concept of per-drive current directories,
+ // which make drive-using paths confusing. Let's not get into that.
+  return IsDirSep (str[0])
+    || (((str[0] >= 'A' && str[0] <= 'Z')
+	 || (str[0] >= 'a' && str[0] <= 'z')) && str[1] == ':');
+}
+#else
+inline bool IsDirSep (char c)
+{
+ return c == '/';
+}
+inline bool IsAbsPath (char const *str)
+{
+ return IsDirSep (str[0]);
+}
+#endif
+
+constexpr char DIR_SEPARATOR = '/';
+
+constexpr char DOT_REPLACE = ','; // Replace . directories
+constexpr char COLON_REPLACE = '-'; // Replace : (partition char)
+constexpr char const REPO_DIR[] = "cmi.cache";
+
+namespace Cody {
+
+Resolver::~Resolver ()
+{
+}
+
+char const *Resolver::GetCMISuffix ()
+{
+ return "cmi";
+}
+
+std::string Resolver::GetCMIName (std::string const &module)
+{
+ std::string result;
+
+ result.reserve (module.size () + 8);
+ bool is_header = false;
+ bool is_abs = false;
+
+ if (IsAbsPath (module.c_str ()))
+ is_header = is_abs = true;
+ else if (module.front () == '.' && IsDirSep (module.c_str ()[1]))
+ is_header = true;
+
+ if (is_abs)
+ {
+ result.push_back ('.');
+ result.append (module);
+ }
+ else
+ result = std::move (module);
+
+ if (is_header)
+ {
+ if (!is_abs)
+ result[0] = DOT_REPLACE;
+
+ /* Map .. to DOT_REPLACE, DOT_REPLACE. */
+ for (size_t ix = 1; ; ix++)
+ {
+ ix = result.find ('.', ix);
+ if (ix == result.npos)
+ break;
+ if (ix + 2 > result.size ())
+ break;
+ if (result[ix + 1] != '.')
+ continue;
+ if (!IsDirSep (result[ix - 1]))
+ continue;
+ if (!IsDirSep (result[ix + 2]))
+ continue;
+ result[ix] = DOT_REPLACE;
+ result[ix + 1] = DOT_REPLACE;
+ }
+ }
+ else if (COLON_REPLACE != ':')
+ {
+ // There can only be one colon in a module name
+ auto colon = result.find (':');
+ if (colon != result.npos)
+ result[colon] = COLON_REPLACE;
+ }
+
+ if (char const *suffix = GetCMISuffix ())
+ {
+ result.push_back ('.');
+ result.append (suffix);
+ }
+
+ return result;
+}
+
+void Resolver::WaitUntilReady (Server *)
+{
+}
+
+Resolver *Resolver::ConnectRequest (Server *s, unsigned version,
+ std::string &, std::string &)
+{
+ if (version > Version)
+ s->ErrorResponse ("version mismatch");
+ else
+ s->ConnectResponse ("default");
+
+ return this;
+}
+
+int Resolver::ModuleRepoRequest (Server *s)
+{
+ s->PathnameResponse (REPO_DIR);
+ return 0;
+}
+
+// Deprecated resolver functions
+int Resolver::ModuleExportRequest (Server *s, Flags, std::string &module)
+{
+ auto cmi = GetCMIName (module);
+ s->PathnameResponse (cmi);
+ return 0;
+}
+
+int Resolver::ModuleImportRequest (Server *s, Flags, std::string &module)
+{
+ auto cmi = GetCMIName (module);
+ s->PathnameResponse (cmi);
+ return 0;
+}
+
+int Resolver::ModuleCompiledRequest (Server *s, Flags, std::string &)
+{
+ s->OKResponse ();
+ return 0;
+}
+
+int Resolver::IncludeTranslateRequest (Server *s, Flags, std::string &include)
+{
+ bool xlate = false;
+
+ // This is not the most efficient
+ auto cmi = GetCMIName (include);
+ struct stat statbuf;
+
+#if HAVE_FSTATAT
+ int fd_dir = open (REPO_DIR, O_RDONLY | O_CLOEXEC | O_DIRECTORY);
+ if (fd_dir >= 0
+ && fstatat (fd_dir, cmi.c_str (), &statbuf, 0) == 0
+ && S_ISREG (statbuf.st_mode))
+ // Sadly can't easily check if this process has read access,
+ // except by trying to open it.
+ xlate = true;
+ if (fd_dir >= 0)
+ close (fd_dir);
+#else
+ std::string append = REPO_DIR;
+ append.push_back (DIR_SEPARATOR);
+ append.append (cmi);
+ if (stat (append.c_str (), &statbuf) == 0
+      && S_ISREG (statbuf.st_mode))
+ xlate = true;
+#endif
+
+ if (xlate)
+ s->PathnameResponse (cmi);
+ else
+ s->BoolResponse (false);
+
+ return 0;
+}
+
+void Resolver::ErrorResponse (Server *server, std::string &&msg)
+{
+ server->ErrorResponse (msg);
+}
+
+}
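
The default Resolver keeps everything under cmi.cache: a module's partition colon becomes '-', header-unit names (absolute, or starting "./") have their leading dot and any ".." components rewritten with ',' so the result stays inside the repository, and the "cmi" suffix is appended. A sketch that makes the mapping visible; the expected strings in the comments were traced by hand from GetCMIName above, and the sketch assumes that member is reachable from a derived class.

  // Sketch: show the default module-name -> CMI-file mapping.
  #include "cody.hh"
  #include <iostream>
  #include <string>

  struct ShowMapping : Cody::Resolver
  {
    void Dump (std::string const &module)
    {
      std::cout << module << " -> " << GetCMIName (module) << '\n';
    }
  };

  int main ()
  {
    ShowMapping map;
    map.Dump ("foo");                   // foo.cmi
    map.Dump ("foo:part");              // foo-part.cmi  (':' -> '-')
    map.Dump ("./baz.h");               // ,/baz.h.cmi   (leading '.' -> ',')
    map.Dump ("/usr/include/stdio.h");  // ./usr/include/stdio.h.cmi
  }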
diff --git a/libcody/server.cc b/libcody/server.cc
new file mode 100644
index 0000000..b9ceec4
--- /dev/null
+++ b/libcody/server.cc
@@ -0,0 +1,306 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Cody
+#include "internal.hh"
+// C++
+#include <tuple>
+// C
+#include <cerrno>
+#include <cstring>
+
+// Server code
+
+namespace Cody {
+
+// These do not need to be members
+static Resolver *ConnectRequest (Server *, Resolver *,
+ std::vector<std::string> &words);
+static int ModuleRepoRequest (Server *, Resolver *,
+ std::vector<std::string> &words);
+static int ModuleExportRequest (Server *, Resolver *,
+ std::vector<std::string> &words);
+static int ModuleImportRequest (Server *, Resolver *,
+ std::vector<std::string> &words);
+static int ModuleCompiledRequest (Server *, Resolver *,
+ std::vector<std::string> &words);
+static int IncludeTranslateRequest (Server *, Resolver *,
+ std::vector<std::string> &words);
+
+namespace {
+using RequestFn = int (Server *, Resolver *, std::vector<std::string> &);
+using RequestPair = std::tuple<char const *, RequestFn *>;
+static RequestPair
+ const requestTable[Detail::RC_HWM] =
+ {
+ // Same order as enum RequestCode
+ RequestPair {u8"HELLO", nullptr},
+ RequestPair {u8"MODULE-REPO", ModuleRepoRequest},
+ RequestPair {u8"MODULE-EXPORT", ModuleExportRequest},
+ RequestPair {u8"MODULE-IMPORT", ModuleImportRequest},
+ RequestPair {u8"MODULE-COMPILED", ModuleCompiledRequest},
+ RequestPair {u8"INCLUDE-TRANSLATE", IncludeTranslateRequest},
+ };
+}
+
+Server::Server (Resolver *r)
+ : resolver (r), direction (READING)
+{
+ PrepareToRead ();
+}
+
+Server::Server (Server &&src)
+ : write (std::move (src.write)),
+ read (std::move (src.read)),
+ resolver (src.resolver),
+ is_connected (src.is_connected),
+ direction (src.direction)
+{
+ fd.from = src.fd.from;
+ fd.to = src.fd.to;
+}
+
+Server::~Server ()
+{
+}
+
+Server &Server::operator= (Server &&src)
+{
+ write = std::move (src.write);
+ read = std::move (src.read);
+ resolver = src.resolver;
+ is_connected = src.is_connected;
+ direction = src.direction;
+ fd.from = src.fd.from;
+ fd.to = src.fd.to;
+
+ return *this;
+}
+
+void Server::DirectProcess (Detail::MessageBuffer &from,
+ Detail::MessageBuffer &to)
+{
+ read.PrepareToRead ();
+ std::swap (read, from);
+ ProcessRequests ();
+ resolver->WaitUntilReady (this);
+ write.PrepareToWrite ();
+ std::swap (to, write);
+}
+
+void Server::ProcessRequests (void)
+{
+ std::vector<std::string> words;
+
+ direction = PROCESSING;
+ while (!read.IsAtEnd ())
+ {
+ int err = 0;
+ unsigned ix = Detail::RC_HWM;
+ if (!read.Lex (words))
+ {
+ Assert (!words.empty ());
+ while (ix--)
+ {
+ if (words[0] != std::get<0> (requestTable[ix]))
+ continue; // not this one
+
+ if (ix == Detail::RC_CONNECT)
+ {
+ // CONNECT
+ if (IsConnected ())
+ err = -1;
+ else if (auto *r = ConnectRequest (this, resolver, words))
+ resolver = r;
+ else
+ err = -1;
+ }
+ else
+ {
+ if (!IsConnected ())
+ err = -1;
+ else if (int res = (std::get<1> (requestTable[ix])
+ (this, resolver, words)))
+ err = res;
+ }
+ break;
+ }
+ }
+
+ if (err || ix >= Detail::RC_HWM)
+ {
+ // Some kind of error
+ std::string msg;
+
+ if (err > 0)
+ msg = u8"error processing '";
+ else if (ix >= Detail::RC_HWM)
+ msg = u8"unrecognized '";
+ else if (IsConnected () && ix == Detail::RC_CONNECT)
+ msg = u8"already connected '";
+ else if (!IsConnected () && ix != Detail::RC_CONNECT)
+ msg = u8"not connected '";
+ else
+ msg = u8"malformed '";
+
+ read.LexedLine (msg);
+ msg.append (u8"'");
+ if (err > 0)
+ {
+ msg.append (u8" ");
+ msg.append (strerror (err));
+ }
+ resolver->ErrorResponse (this, std::move (msg));
+ }
+ }
+}
+
+// Return numeric value of STR as an unsigned. Returns ~0u on error
+// (so that value is not representable).
+static unsigned ParseUnsigned (std::string &str)
+{
+ char *eptr;
+ unsigned long val = strtoul (str.c_str (), &eptr, 10);
+ if (*eptr || unsigned (val) != val)
+ return ~0u;
+
+ return unsigned (val);
+}
+
+Resolver *ConnectRequest (Server *s, Resolver *r,
+ std::vector<std::string> &words)
+{
+ if (words.size () < 3 || words.size () > 4)
+ return nullptr;
+
+ if (words.size () == 3)
+ words.emplace_back (u8"");
+ unsigned version = ParseUnsigned (words[1]);
+ if (version == ~0u)
+ return nullptr;
+
+ return r->ConnectRequest (s, version, words[2], words[3]);
+}
+
+int ModuleRepoRequest (Server *s, Resolver *r, std::vector<std::string> &words)
+{
+ if (words.size () != 1)
+ return -1;
+
+ return r->ModuleRepoRequest (s);
+}
+
+int ModuleExportRequest (Server *s, Resolver *r, std::vector<std::string> &words)
+{
+ if (words.size () < 2 || words.size () > 3 || words[1].empty ())
+ return -1;
+
+ Flags flags = Flags::None;
+ if (words.size () == 3)
+ {
+ unsigned val = ParseUnsigned (words[2]);
+ if (val == ~0u)
+ return -1;
+ flags = Flags (val);
+ }
+
+ return r->ModuleExportRequest (s, flags, words[1]);
+}
+
+int ModuleImportRequest (Server *s, Resolver *r, std::vector<std::string> &words)
+{
+ if (words.size () < 2 || words.size () > 3 || words[1].empty ())
+ return -1;
+
+ Flags flags = Flags::None;
+ if (words.size () == 3)
+ {
+ unsigned val = ParseUnsigned (words[2]);
+ if (val == ~0u)
+ return -1;
+ flags = Flags (val);
+ }
+
+ return r->ModuleImportRequest (s, flags, words[1]);
+}
+
+int ModuleCompiledRequest (Server *s, Resolver *r,
+ std::vector<std::string> &words)
+{
+ if (words.size () < 2 || words.size () > 3 || words[1].empty ())
+ return -1;
+
+ Flags flags = Flags::None;
+ if (words.size () == 3)
+ {
+ unsigned val = ParseUnsigned (words[2]);
+ if (val == ~0u)
+ return -1;
+ flags = Flags (val);
+ }
+
+ return r->ModuleCompiledRequest (s, flags, words[1]);
+}
+
+int IncludeTranslateRequest (Server *s, Resolver *r,
+ std::vector<std::string> &words)
+{
+ if (words.size () < 2 || words.size () > 3 || words[1].empty ())
+ return -1;
+
+ Flags flags = Flags::None;
+ if (words.size () == 3)
+ {
+ unsigned val = ParseUnsigned (words[2]);
+ if (val == ~0u)
+ return -1;
+ flags = Flags (val);
+ }
+
+ return r->IncludeTranslateRequest (s, flags, words[1]);
+}
+
+void Server::ErrorResponse (char const *error, size_t elen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"ERROR");
+ write.AppendWord (error, true, elen);
+ write.EndLine ();
+}
+
+void Server::OKResponse ()
+{
+ write.BeginLine ();
+ write.AppendWord (u8"OK");
+ write.EndLine ();
+}
+
+void Server::ConnectResponse (char const *agent, size_t alen)
+{
+ is_connected = true;
+
+ write.BeginLine ();
+ write.AppendWord (u8"HELLO");
+ write.AppendInteger (Version);
+ write.AppendWord (agent, true, alen);
+ write.EndLine ();
+}
+
+void Server::PathnameResponse (char const *cmi, size_t clen)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"PATHNAME");
+ write.AppendWord (cmi, true, clen);
+ write.EndLine ();
+}
+
+void Server::BoolResponse (bool truthiness)
+{
+ write.BeginLine ();
+ write.AppendWord (u8"BOOL");
+ write.AppendWord (truthiness ? u8"TRUE" : u8"FALSE");
+ write.EndLine ();
+}
+
+}
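
server.cc is the dispatcher: ProcessRequests lexes a request line, matches its first word against requestTable, enforces the connected/not-connected state, and lets the resolver emit exactly one response line (HELLO, PATHNAME, BOOL, OK or ERROR) per request. Customisation happens on the Resolver side; a sketch of redirecting the repository directory, assuming ModuleRepoRequest is virtual in cody.hh, as the dispatch here and the pivot-1 test below rely on such overriding.

  // Sketch: answer MODULE-REPO with a different directory.  The class and
  // the "obj/cmi" path are illustrative only.
  #include "cody.hh"

  class BuildDirResolver : public Cody::Resolver
  {
  public:
    virtual int ModuleRepoRequest (Cody::Server *s)
    {
      s->PathnameResponse ("obj/cmi");  // instead of the default cmi.cache
      return 0;
    }
  };

Such a resolver is handed to a Server exactly as the tests below do: Server server (&resolver, from_fd, to_fd).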
diff --git a/libcody/tests/01-serialize/connect.cc b/libcody/tests/01-serialize/connect.cc
new file mode 100644
index 0000000..85277c8
--- /dev/null
+++ b/libcody/tests/01-serialize/connect.cc
@@ -0,0 +1,30 @@
+
+// Test client connection handshake
+// RUN: <<HELLO 1 TESTING
+// RUN: $subdir$stem | ezio -p OUT $test |& ezio -p ERR $test
+// RUN-END:
+
+// OUT-NEXT:^HELLO {:[0-9]+} TEST IDENT$
+// OUT-NEXT:$EOF
+
+// ERR-NEXT:Code:{:[0-9]+}$
+// ERR-NEXT:Version:1$
+// ERR-NEXT:$EOF
+
+
+// Cody
+#include "cody.hh"
+// C++
+#include <iostream>
+
+using namespace Cody;
+
+int main (int, char *[])
+{
+ Client client (0, 1);
+
+ auto token = client.Connect ("TEST", "IDENT");
+
+ std::cerr << "Code:" << token.GetCode () << '\n';
+ std::cerr << "Version:" << token.GetInteger () << '\n';
+}
diff --git a/libcody/tests/01-serialize/decoder.cc b/libcody/tests/01-serialize/decoder.cc
new file mode 100644
index 0000000..a3495d1
--- /dev/null
+++ b/libcody/tests/01-serialize/decoder.cc
@@ -0,0 +1,73 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// RUN: <<bob 'frob dob''\nF\_b\20\61\\'
+// RUN: $subdir$stem |& ezio $test
+// CHECK-NEXT: ^line:0 word:0 'bob'
+// CHECK-NEXT: ^line:0 word:1 'frob dob$
+// CHECK-OPTION: matchSpace
+// CHECK-NEXT: ^F b a\'$
+// CHECK-NEXT: $EOF
+
+/* RUN: <<line-1 word:1 ;
+ RUN: <<'line 2' ;
+ RUN: <<
+*/
+// RUN: $subdir$stem |& ezio -p CHECK2 $test
+// CHECK2-NEXT: line:0 word:0 'line-1'
+// CHECK2-NEXT: line:0 word:1 'word:1'
+// CHECK2-NEXT: line:1 word:0 'line 2'
+// CHECK2-NEXT: error:No
+// CHECK2-NEXT: $EOF
+
+// RUN: <<'
+// RUN: $subdir$stem |& ezio -p CHECK3 $test
+// CHECK3-NEXT: error:Invalid argument
+// CHECK3-NEXT: line:0 word:0 '''
+// CHECK3-NEXT: $EOF
+
+/* RUN: << ;
+ RUN: <<'\g'
+*/
+// RUN: $subdir$stem |& ezio -p CHECK4 $test
+// CHECK4-NEXT: error:No
+// CHECK4-NEXT: error:Invalid argument
+// CHECK4-NEXT: line:1 word:0 ''\g''
+// CHECK4-NEXT: $EOF
+
+// RUN-END:
+
+// Cody
+#include "cody.hh"
+// C++
+#include <iostream>
+// C
+#include <cstring>
+
+using namespace Cody;
+
+int main (int, char *[])
+{
+ Detail::MessageBuffer reader;
+
+ reader.PrepareToRead ();
+ while (int e = reader.Read (0))
+ if (e != EAGAIN && e != EINTR)
+ break;
+
+ std::vector<std::string> words;
+ for (unsigned line = 0; !reader.IsAtEnd (); line++)
+ {
+ if (int e = reader.Lex (words))
+ std::cerr << "error:" << strerror (e) << '\n';
+ for (unsigned ix = 0; ix != words.size (); ix++)
+ {
+ auto &word = words[ix];
+
+ std::cerr << "line:" << line << " word:" << ix
+ << " '" << word << "'\n";
+ }
+ }
+ return 0;
+}
diff --git a/libcody/tests/01-serialize/encoder.cc b/libcody/tests/01-serialize/encoder.cc
new file mode 100644
index 0000000..c4cab6c
--- /dev/null
+++ b/libcody/tests/01-serialize/encoder.cc
@@ -0,0 +1,48 @@
+// CODYlib -*- mode:c++ -*-
+// Copyright (C) 2020 Nathan Sidwell, nathan@acm.org
+// License: Apache v2.0
+
+// Test message encoding, both string quoting and continuation lines
+
+// RUN: $subdir$stem |& ezio $test
+// RUN-END:
+// The ¯ is utf8-encoded as c2 af
+// CHECK-NEXT: ^bob 'frob dob''\n¯\\'$
+// CHECK-NEXT: ^2 ;$
+// CHECK-NEXT: ^3$
+// CHECK-NEXT: $EOF
+
+// Cody
+#include "cody.hh"
+
+using namespace Cody;
+
+int main (int, char *[])
+{
+ Detail::MessageBuffer writer;
+
+ writer.BeginLine ();
+ writer.AppendWord ("bob");
+ writer.AppendWord ("frob dob", true);
+ writer.Append ("\n\xc2\xaf\\", true);
+ writer.EndLine ();
+
+ writer.PrepareToWrite ();
+ while (int err = writer.Write (2))
+ if (err != EAGAIN && err != EINTR)
+ break;
+
+ writer.BeginLine ();
+ writer.Append ("2", true);
+ writer.EndLine ();
+ writer.BeginLine ();
+ writer.Append ("3", true);
+ writer.EndLine ();
+
+ writer.PrepareToWrite ();
+ while (int err = writer.Write (2))
+ if (err != EAGAIN && err != EINTR)
+ break;
+
+ return 0;
+}
diff --git a/libcody/tests/02-comms/client-1.cc b/libcody/tests/02-comms/client-1.cc
new file mode 100644
index 0000000..edff0ab
--- /dev/null
+++ b/libcody/tests/02-comms/client-1.cc
@@ -0,0 +1,97 @@
+
+// Test client message round tripping
+/*
+ RUN: <<HELLO 1 TESTING ;
+ RUN: <<PATHNAME REPO ;
+ RUN: <<PATHNAME biz/bar ;
+ RUN: <<PATHNAME blob ;
+ RUN: <<BOOL FALSE ;
+ RUN: << BOOL TRUE ;
+ RUN: << PATHNAME foo ;
+ RUN: <<OK
+*/
+// RUN: $subdir$stem | ezio -p OUT $test |& ezio -p ERR $test
+// RUN-END:
+
+/*
+ OUT-NEXT:^HELLO {:[0-9]+} TEST IDENT ;$
+ OUT-NEXT:^MODULE-REPO ;
+ OUT-NEXT:^MODULE-EXPORT bar ;
+ OUT-NEXT:^MODULE-IMPORT foo ;
+ OUT-NEXT:^INCLUDE-TRANSLATE baz.frob ;
+ OUT-NEXT:^INCLUDE-TRANSLATE ./corge ;
+ OUT-NEXT:^INCLUDE-TRANSLATE ./quux ;
+ OUT-NEXT:^MODULE-COMPILED bar
+*/
+// OUT-NEXT:$EOF
+
+// ERR-NEXT:Code:1$
+// ERR-NEXT:Integer:1$
+// ERR-NEXT:Code:5$
+// ERR-NEXT:String:REPO$
+// ERR-NEXT:Code:5$
+// ERR-NEXT:String:biz/bar$
+// ERR-NEXT:Code:5$
+// ERR-NEXT:String:blob$
+// ERR-NEXT:Code:4$
+// ERR-NEXT:Integer:0$
+// ERR-NEXT:Code:4$
+// ERR-NEXT:Integer:1$
+// ERR-NEXT:Code:5$
+// ERR-NEXT:String:foo
+// ERR-NEXT:Code:3$
+// ERR-NEXT:Integer:
+// ERR-NEXT:$EOF
+
+
+// Cody
+#include "cody.hh"
+// C++
+#include <iostream>
+
+using namespace Cody;
+
+int main (int, char *[])
+{
+ Client client (0, 1);
+
+ client.Cork ();
+ if (client.Connect ("TEST", "IDENT").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.ModuleRepo ().GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.ModuleExport ("bar").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.ModuleImport ("foo").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.IncludeTranslate ("baz.frob").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.IncludeTranslate ("./corge").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.IncludeTranslate ("./quux").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+ if (client.ModuleCompiled ("bar").GetCode () != Client::PC_CORKED)
+ std::cerr << "Not corked!\n";
+
+ auto result = client.Uncork ();
+ for (auto iter = result.begin (); iter != result.end (); ++iter)
+ {
+ std::cerr << "Code:" << iter->GetCode () << '\n';
+ switch (iter->GetCategory ())
+ {
+ case Packet::INTEGER:
+ std::cerr << "Integer:" << iter->GetInteger () << '\n';
+ break;
+ case Packet::STRING:
+ std::cerr << "String:" << iter->GetString () << '\n';
+ break;
+ case Packet::VECTOR:
+ {
+ auto const &v = iter->GetVector ();
+ for (unsigned ix = 0; ix != v.size (); ix++)
+ std::cerr << "Vector[" << ix << "]:" << v[ix] << '\n';
+ }
+ break;
+ }
+ }
+}
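
The test above batches every request behind Cork/Uncork so a single write carries them all. For comparison, an uncorked sketch where each call round-trips immediately and yields one Packet; it uses only the accessors the test itself exercises.

  // Sketch: uncorked client usage, one round trip per request.
  #include "cody.hh"
  #include <iostream>

  int main ()
  {
    Cody::Client client (0, 1);  // converse over stdin/stdout, as the test does

    auto hello = client.Connect ("TEST", "IDENT");
    std::cerr << "connect code:" << hello.GetCode () << '\n';

    auto repo = client.ModuleRepo ();
    if (repo.GetCategory () == Cody::Packet::STRING)
      std::cerr << "repo:" << repo.GetString () << '\n';

    auto cmi = client.ModuleExport ("bar");
    if (cmi.GetCategory () == Cody::Packet::STRING)
      std::cerr << "export to:" << cmi.GetString () << '\n';
    return 0;
  }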
diff --git a/libcody/tests/02-comms/pivot-1.cc b/libcody/tests/02-comms/pivot-1.cc
new file mode 100644
index 0000000..b98c833
--- /dev/null
+++ b/libcody/tests/02-comms/pivot-1.cc
@@ -0,0 +1,76 @@
+
+// Test resolver pivot
+
+// RUN:<<HELLO 1 TEST IDENT ;
+// RUN:<<MODULE-REPO ;
+// RUN:<<HELLO 1 TEST IDENT
+// RUN: $subdir$stem | ezio -p OUT1 $test |& ezio -p ERR1 $test
+// OUT1-NEXT:HELLO 1 default ;
+// OUT1-NEXT:PATHNAME cmi.cache ;
+// OUT1-NEXT:ERROR 'already connected
+// OUT1-NEXT:$EOF
+// ERR1-NEXT:resolver is handler
+// ERR1-NEXT:$EOF
+
+// RUN:<<MODULE-REPO ;
+// RUN:<<HELLO 1 TEST IDENT ;
+// RUN:<<MODULE-REPO
+// RUN: $subdir$stem | ezio -p OUT2 $test |& ezio -p ERR2 $test
+// OUT2-NEXT:ERROR 'not connected
+// OUT2-NEXT:HELLO 1 default ;
+// OUT2-NEXT:PATHNAME cmi.cache
+// OUT2-NEXT:$EOF
+// ERR2-NEXT:resolver is handler
+// ERR2-NEXT:$EOF
+
+// RUN-END:
+#include "cody.hh"
+#include <iostream>
+
+using namespace Cody;
+
+class Handler : public Resolver
+{
+ virtual Handler *ConnectRequest (Server *s, unsigned ,
+ std::string &, std::string &)
+ {
+ ErrorResponse (s, "unexpected connect call");
+ return nullptr;
+ }
+};
+
+Handler handler;
+
+class Initial : public Resolver
+{
+ virtual Handler *ConnectRequest (Server *s, unsigned v,
+ std::string &agent, std::string &ident)
+ {
+ Resolver::ConnectRequest (s, v, agent, ident);
+ return &handler;
+ }
+};
+
+Initial initial;
+
+int main (int, char *[])
+{
+ Server server (&initial, 0, 1);
+
+ while (int e = server.Read ())
+ if (e != EAGAIN && e != EINTR)
+ break;
+
+ server.ProcessRequests ();
+ if (server.GetResolver () == &handler)
+ std::cerr << "resolver is handler\n";
+ else if (server.GetResolver () == &initial)
+ std::cerr << "resolver is initial\n";
+ else
+ std::cerr << "resolver is surprising\n";
+
+ server.PrepareToWrite ();
+ while (int e = server.Write ())
+ if (e != EAGAIN && e != EINTR)
+ break;
+}
diff --git a/libcody/tests/02-comms/server-1.cc b/libcody/tests/02-comms/server-1.cc
new file mode 100644
index 0000000..0a8694e
--- /dev/null
+++ b/libcody/tests/02-comms/server-1.cc
@@ -0,0 +1,68 @@
+
+// Test server message round tripping
+/*
+ RUN:<<HELLO 1 TEST IDENT ;
+ RUN:<<MODULE-REPO ;
+ RUN:<<MODULE-EXPORT bar ;
+ RUN:<<MODULE-IMPORT foo ;
+ RUN:<<NOT A COMMAND ;
+ RUN:<<INCLUDE-TRANSLATE baz.frob ;
+ RUN:<<INCLUDE-TRANSLATE ./quux ;
+ RUN:<<MODULE-COMPILED bar ;
+ RUN:<<MODULE-IMPORT ''
+*/
+// RUN: $subdir$stem | ezio -p OUT1 $test |& ezio -p ERR1 $test
+
+// These all fail because there's nothing in the server interpreting stuff
+/*
+ OUT1-NEXT: ^HELLO 1 default ;
+ OUT1-NEXT: ^PATHNAME cmi.cache ;
+ OUT1-NEXT: ^PATHNAME bar.cmi ;
+ OUT1-NEXT: ^PATHNAME foo.cmi ;
+ OUT1-NEXT: ^ERROR 'unrecognized \'NOT
+ OUT1-NEXT: ^BOOL FALSE ;
+ OUT1-NEXT: ^BOOL FALSE ;
+ OUT1-NEXT: ^OK
+ OUT1-NEXT: ^ERROR 'malformed
+*/
+// OUT1-NEXT:$EOF
+// ERR1-NEXT:$EOF
+
+/*
+ RUN:<<HELLO 1 TEST IDENT
+ RUN:<<MODULE-REPO
+*/
+// RUN: $subdir$stem | ezio -p OUT2 $test |& ezio -p ERR2 $test
+/*
+ OUT2-NEXT: ^HELLO 1 default
+*/
+// OUT2-NEXT:$EOF
+// ERR2-NEXT:$EOF
+
+// RUN-END:
+
+// Cody
+#include "cody.hh"
+// C++
+#include <iostream>
+
+using namespace Cody;
+
+int main (int, char *[])
+{
+ Resolver r;
+ Server server (&r, 0, 1);
+
+ while (int e = server.Read ())
+ if (e != EAGAIN && e != EINTR)
+ break;
+
+ server.ProcessRequests ();
+ if (server.GetResolver () != &r)
+ std::cerr << "resolver changed\n";
+ server.PrepareToWrite ();
+
+ while (int e = server.Write ())
+ if (e != EAGAIN && e != EINTR)
+ break;
+}
diff --git a/libcody/tests/Makesub.in b/libcody/tests/Makesub.in
new file mode 100644
index 0000000..329e946
--- /dev/null
+++ b/libcody/tests/Makesub.in
@@ -0,0 +1,36 @@
+# CODYlib -*- mode:Makefile -*-
+# Copyright (C) 2019-2020 Nathan Sidwell, nathan@acm.org
+# License: Apache v2.0
+
+ALOY := @ALOY@
+TESTS := $(patsubst $(srcdir)/%.cc,%,\
+ $(wildcard $(srcdir)/tests/*/*.cc))
+TESTDIRS = $(shell cd $(srcdir)/${<D} ; echo *(/))
+testdir := $(and $(filter-out /%,$(srcdir)),../)$(srcdir)/tests
+
+check:: tests/cody.defs $(TESTS)
+ +cd ${<D} && srcbuilddir=$(srcdir)/tests JOUST=${<F} \
+ $(ALOY) -t kratos -o cody -g $(testdir)/jouster $(TESTDIRS)
+ifeq ($(firstword $(ALOY)),:)
+ @echo WARNING: tests were not run as Joust test harness was not found
+endif
+
+tests/cody.defs: tests/Makesub
+ echo '# Automatically generated by Make' >$@
+ echo 'testdir=$(testdir)' >>$@
+ echo 'timelimit=60' >>$@
+ echo 'memlimit=1' >>$@
+ echo 'cpulimit=60' >>$@
+ echo 'filelimit=1' >>$@
+ echo 'SHELL=$(SHELL)' >>$@
+
+$(TESTS): %: %.o libcody.a
+ $(CXX) $(LDFLAGS) $< -lcody $(LIBS) -o $@
+
+clean::
+ rm -f $(TESTS)
+ rm -f $(TESTS:=.o) $(TESTS:=.d)
+
+ifeq ($(filter clean%,$(MAKECMDGOALS)),)
+-include $(TESTS:=.d)
+endif
diff --git a/libcody/tests/jouster b/libcody/tests/jouster
new file mode 100755
index 0000000..c7a913c
--- /dev/null
+++ b/libcody/tests/jouster
@@ -0,0 +1,11 @@
+#! /bin/zsh
+# CODYlib -*- mode:Makefile -*-
+# Copyright (C) 2019-2020 Nathan Sidwell, nathan@acm.org
+# License Apache v2.0
+
+pushd ${0%/*}
+setopt nullglob
+for subdir in $@ ; do
+ echo $subdir/*(.^*)
+done
+popd