Diffstat
-rw-r--r--  .github/workflows/cmake.yml     20
-rw-r--r--  .gitignore                       4
-rw-r--r--  Doxyfile                      2894
-rw-r--r--  LICENSE                         12
-rw-r--r--  README.md                       43
-rwxr-xr-x  build-aux/ar-lib               271
-rwxr-xr-x  build-aux/compile              348
-rwxr-xr-x  build-aux/config.guess        1807
-rwxr-xr-x  build-aux/config.sub          1960
-rwxr-xr-x  build-aux/install-sh           541
-rw-r--r--  build-aux/ltmain.sh          11338
-rwxr-xr-x  build-aux/missing              215
-rwxr-xr-x  configure                        4
-rw-r--r--  docs/C.md                       43
-rw-r--r--  include/locusts.h               13
-rw-r--r--  meson.build                      4
-rw-r--r--  src/locusts.c                    3
17 files changed, 19504 insertions(+), 16 deletions(-)
diff --git a/.github/workflows/cmake.yml b/.github/workflows/cmake.yml
new file mode 100644
index 0000000..e1b9cb8
--- /dev/null
+++ b/.github/workflows/cmake.yml
@@ -0,0 +1,20 @@
+name: CMake Builds
+
+on:
+  push:
+  pull_request:
+
+jobs:
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, macos-latest, windows-latest]
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Configure CMake
+        run: cmake -B build -S .
+      - name: Build
+        run: cmake --build build
diff --git a/.gitignore b/.gitignore
index 696818a..d7224f1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -105,10 +105,8 @@ m4/lt~obsolete.m4
# can automatically generate from config.status script
# (which is called by configure script))
GNUmakefile
-
-/build-aux
-*~
ocusts.pc
+*~
# CMake
build/**/*
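The Doxyfile introduced below points INPUT at include/ and docs/C.md and leaves JAVADOC_AUTOBRIEF at NO, so brief descriptions need an explicit @brief (or \brief) command. Purely as a sketch of the comment style those settings expect — the function name and signature here are hypothetical and not taken from this commit — a documented declaration in include/locusts.h might look like:

    #include <stddef.h>

    /**
     * @brief Release a swarm of the given size (hypothetical example, not part of this commit).
     *
     * @param count Number of locusts to release; must be greater than zero.
     * @return 0 on success, or a negative error code on failure.
     */
    int locusts_release(size_t count);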
diff --git a/Doxyfile b/Doxyfile
new file mode 100644
index 0000000..ecd5142
--- /dev/null
+++ b/Doxyfile
@@ -0,0 +1,2894 @@
+# Doxyfile 1.10.0
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+#
+# Note:
+#
+# Use doxygen to compare the used configuration file with the template
+# configuration file:
+# doxygen -x [configFile]
+# Use doxygen to compare the used configuration file with the template
+# configuration file without replacing the environment variables or CMake type
+# replacement variables:
+# doxygen -x_noenv [configFile]
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the configuration
+# file that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# https://www.gnu.org/software/libiconv/ for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME = "liboCusts"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER = "1.0.0"
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF = "A completely normal C library"
+
+# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
+# in the documentation. The maximum height of the logo should not exceed 55
+# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
+# the logo to the output directory.
+
+PROJECT_LOGO =
+
+# With the PROJECT_ICON tag one can specify an icon that is included in the tabs
+# when the HTML document is shown. Doxygen will copy the logo to the output
+# directory.
+
+PROJECT_ICON =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY =
+
+# If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096
+# sub-directories (in 2 levels) under the output directory of each output format
+# and will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system. Adapt CREATE_SUBDIRS_LEVEL to
+# control the number of sub-directories.
+# The default value is: NO.
+
+CREATE_SUBDIRS = NO
+
+# Controls the number of sub-directories that will be created when
+# CREATE_SUBDIRS tag is set to YES. Level 0 represents 16 directories, and every
+# level increment doubles the number of directories, resulting in 4096
+# directories at level 8 which is the default and also the maximum value. The
+# sub-directories are organized in 2 levels, the first level always has a fixed
+# number of 16 directories.
+# Minimum value: 0, maximum value: 8, default value: 8.
+# This tag requires that the tag CREATE_SUBDIRS is set to YES.
+
+CREATE_SUBDIRS_LEVEL = 8
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Bulgarian,
+# Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, Dutch, English
+# (United States), Esperanto, Farsi (Persian), Finnish, French, German, Greek,
+# Hindi, Hungarian, Indonesian, Italian, Japanese, Japanese-en (Japanese with
+# English messages), Korean, Korean-en (Korean with English messages), Latvian,
+# Lithuanian, Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese,
+# Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish,
+# Swedish, Turkish, Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity): The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF = "The $name class" \
+ "The $name widget" \
+ "The $name file" \
+ is \
+ provides \
+ specifies \
+ contains \
+ represents \
+ a \
+ an \
+ the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
+# before each file name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used.
+# The default value is: YES.
+
+FULL_PATH_NAMES = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line
+# such as
+# /***************
+# as being the beginning of a Javadoc-style comment "banner". If set to NO, the
+# Javadoc-style will behave just like regular comments and it will not be
+# interpreted by doxygen.
+# The default value is: NO.
+
+JAVADOC_BANNER = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that Rational Rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# By default Python docstrings are displayed as preformatted text and doxygen's
+# special commands cannot be used. By setting PYTHON_DOCSTRING to NO,
+# doxygen's special commands can be used and the contents of the docstring
+# documentation blocks are shown as doxygen documentation.
+# The default value is: YES.
+
+PYTHON_DOCSTRING = YES
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
+# page for each member. If set to NO, the documentation of a member will be part
+# of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:^^"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". Note that you cannot put \n's in the value part of an alias
+# to insert newlines (in the resulting output). You can put ^^ in the value part
+# of an alias to insert a newline as if a physical newline was in the original
+# file. When you need a literal { or } or , in the value part of an alias you
+# have to escape them by means of a backslash (\). This can lead to conflicts
+# with the commands \{ and \}; for these it is advised to use the versions @{
+# and @} or a double escape (\\{ and \\}).
+
+ALIASES =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
+# sources only. Doxygen will then generate output that is more tailored for that
+# language. For instance, namespaces will be presented as modules, types will be
+# separated into more groups, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_SLICE = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
+# Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice,
+# VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
+# FortranFree, unknown formatted Fortran: Fortran. In the latter case the parser
+# tries to guess whether the code is fixed or free formatted code, this is the
+# default for Fortran type files). For instance to make doxygen treat .inc files
+# as Fortran files (default is PHP), and .f files as C (default is Fortran),
+# use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen. When specifying no_extension you should add
+# * to the FILE_PATTERNS.
+#
+# Note see also the list of default file extension mappings.
+
+EXTENSION_MAPPING =
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See https://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT = YES
+
+# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
+# to that level are automatically included in the table of contents, even if
+# they do not have an id attribute.
+# Note: This feature currently applies only to Markdown headings.
+# Minimum value: 0, maximum value: 99, default value: 5.
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
+
+TOC_INCLUDE_HEADINGS = 5
+
+# The MARKDOWN_ID_STYLE tag can be used to specify the algorithm used to
+# generate identifiers for the Markdown headings. Note: Every identifier is
+# unique.
+# Possible values are: DOXYGEN use a fixed 'autotoc_md' string followed by a
+# sequence number starting at 0 and GITHUB use the lower case version of title
+# with any whitespace replaced by '-' and punctuation characters removed.
+# The default value is: DOXYGEN.
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
+
+MARKDOWN_ID_STYLE = DOXYGEN
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word or
+# globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen replace the get and set methods with a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# If one adds a struct or class to a group and this option is enabled, then also
+# any nested class or struct is added to the same group. By default this option
+# is disabled and one has to add nested compounds explicitly via \ingroup.
+# The default value is: NO.
+
+GROUP_NESTED_COMPOUNDS = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE = 0
+
+# The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use
+# during processing. When set to 0 doxygen will base this on the number of
+# cores available in the system. You can set it explicitly to a value larger
+# than 0 to get more control over the balance between CPU load and processing
+# speed. At this moment only the input processing can be done using multiple
+# threads. Since this is still an experimental feature the default is set to 1,
+# which effectively disables parallel processing. Please report any issues you
+# encounter. Generating dot graphs in parallel is controlled by the
+# DOT_NUM_THREADS setting.
+# Minimum value: 0, maximum value: 32, default value: 1.
+
+NUM_PROC_THREADS = 1
+
+# If the TIMESTAMP tag is set different from NO then each generated page will
+# contain the date or date and time when the page was generated. Setting this to
+# NO can help when comparing the output of multiple runs.
+# Possible values are: YES, NO, DATETIME and DATE.
+# The default value is: NO.
+
+TIMESTAMP = NO
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual
+# methods of a class will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIV_VIRTUAL = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE = NO
+
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous namespace
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES = NO
+
+# If this flag is set to YES, the name of an unnamed parameter in a declaration
+# will be determined by the corresponding definition. By default unnamed
+# parameters remain unnamed in the output.
+# The default value is: YES.
+
+RESOLVE_UNNAMED_PARAMS = YES
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# will also hide undocumented C++ concepts if enabled. This option has no effect
+# if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# declarations. If set to NO, these declarations will be included in the
+# documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS = NO
+
+# With the correct setting of option CASE_SENSE_NAMES doxygen will better be
+# able to match the capabilities of the underlying filesystem. In case the
+# filesystem is case sensitive (i.e. it supports files in the same directory
+# whose names only differ in casing), the option must be set to YES to properly
+# deal with such files in case they appear in the input. For filesystems that
+# are not case sensitive the option should be set to NO to properly deal with
+# output files written for symbols that only differ in casing, such as for two
+# classes, one named CLASS and the other named Class, and to also support
+# references to files without having to specify the exact matching casing. On
+# Windows (including Cygwin) and MacOS, users should typically set this option
+# to NO, whereas on Linux or other Unix flavors it should typically be set to
+# YES.
+# Possible values are: SYSTEM, NO and YES.
+# The default value is: SYSTEM.
+
+CASE_SENSE_NAMES = SYSTEM
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_HEADERFILE tag is set to YES then the documentation for a class
+# will show which file needs to be included to use the class.
+# The default value is: YES.
+
+SHOW_HEADERFILE = YES
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
+# list. This list is created by putting \todo commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
+# list. This list is created by putting \test commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES, the
+# list will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file. See also section "Changing the
+# layout of pages" for information.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
+
+CITE_BIB_FILES =
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as documenting some parameters in
+# a documented function twice, or documenting parameters that don't exist or
+# using markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR = YES
+
+# If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete
+# function parameter documentation. If set to NO, doxygen will accept that some
+# parameters have no documentation without warning.
+# The default value is: YES.
+
+WARN_IF_INCOMPLETE_DOC = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong parameter
+# documentation, but not about the absence of documentation. If EXTRACT_ALL is
+# set to YES then this flag will automatically be disabled. See also
+# WARN_IF_INCOMPLETE_DOC
+# The default value is: NO.
+
+WARN_NO_PARAMDOC = NO
+
+# If WARN_IF_UNDOC_ENUM_VAL option is set to YES, doxygen will warn about
+# undocumented enumeration values. If set to NO, doxygen will accept
+# undocumented enumeration values. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: NO.
+
+WARN_IF_UNDOC_ENUM_VAL = NO
+
+# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
+# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS
+# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but
+# at the end of the doxygen process doxygen will return with a non-zero status.
+# If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS_PRINT then doxygen behaves
+# like FAIL_ON_WARNINGS but in case no WARN_LOGFILE is defined doxygen will not
+# write the warning messages in between other messages but write them at the end
+# of a run, in case a WARN_LOGFILE is defined the warning messages will be
+# besides being in the defined file also be shown at the end of a run, unless
+# the WARN_LOGFILE is defined as - i.e. standard output (stdout) in that case
+# the behavior will remain as with the setting FAIL_ON_WARNINGS.
+# Possible values are: NO, YES, FAIL_ON_WARNINGS and FAIL_ON_WARNINGS_PRINT.
+# The default value is: NO.
+
+WARN_AS_ERROR = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# See also: WARN_LINE_FORMAT
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT = "$file:$line: $text"
+
+# In the $text part of the WARN_FORMAT command it is possible that a reference
+# to a more specific place is given. To make it easier to jump to this place
+# (outside of doxygen) the user can define a custom "cut" / "paste" string.
+# Example:
+# WARN_LINE_FORMAT = "'vi $file +$line'"
+# See also: WARN_FORMAT
+# The default value is: at line $line of file $file.
+
+WARN_LINE_FORMAT = "at line $line of file $file"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr). In case the file specified cannot be opened for writing the
+# warning and error messages are written to standard error. When the file - is
+# specified, the warning and error messages are written to standard output
+# (stdout).
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+INPUT = include/ docs/C.md
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see:
+# https://www.gnu.org/software/libiconv/) for the list of possible encodings.
+# See also: INPUT_FILE_ENCODING
+# The default value is: UTF-8.
+
+INPUT_ENCODING = UTF-8
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. The INPUT_FILE_ENCODING tag can be used to specify the
+# character encoding on a per file pattern basis. Doxygen will compare the file
+# name with each pattern and apply the encoding (instead of the default
+# INPUT_ENCODING) if there is a match. The character encodings are a list of the
+# form: pattern=encoding (like *.php=ISO-8859-1). See cfg_input_encoding
+# "INPUT_ENCODING" for further information on supported encodings.
+
+INPUT_FILE_ENCODING =
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# read by doxygen.
+#
+# Note the list of default checked file patterns might differ from the list of
+# default file extension mappings.
+#
+# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cxxm,
+# *.cpp, *.cppm, *.ccm, *.c++, *.c++m, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl,
+# *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, *.h++, *.ixx, *.l, *.cs, *.d,
+# *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to
+# be provided as doxygen C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
+# *.f18, *.f, *.for, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
+
+FILE_PATTERNS = *.c \
+ *.cc \
+ *.cxx \
+ *.cxxm \
+ *.cpp \
+ *.cppm \
+ *.ccm \
+ *.c++ \
+ *.c++m \
+ *.java \
+ *.ii \
+ *.ixx \
+ *.ipp \
+ *.i++ \
+ *.inl \
+ *.idl \
+ *.ddl \
+ *.odl \
+ *.h \
+ *.hh \
+ *.hxx \
+ *.hpp \
+ *.h++ \
+ *.ixx \
+ *.l \
+ *.cs \
+ *.d \
+ *.php \
+ *.php4 \
+ *.php5 \
+ *.phtml \
+ *.inc \
+ *.m \
+ *.markdown \
+ *.md \
+ *.mm \
+ *.dox \
+ *.py \
+ *.pyw \
+ *.f90 \
+ *.f95 \
+ *.f03 \
+ *.f08 \
+ *.f18 \
+ *.f \
+ *.for \
+ *.vhd \
+ *.vhdl \
+ *.ucf \
+ *.qsf \
+ *.ice
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# ANamespace::AClass, ANamespace::*Test
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that doxygen will use the data processed and written to standard output
+# for further processing, therefore nothing else, like debug statements or used
+# commands (so in case of a Windows batch file always use @echo OFF), should be
+# written to standard output.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE = C.md
+
+# The Fortran standard specifies that for fixed formatted Fortran code all
+# characters from position 72 are to be considered as comment. A common
+# extension is to allow longer lines before the automatic comment starts. The
+# setting FORTRAN_COMMENT_AFTER will also make it possible that longer lines can
+# be processed before the automatic comment starts.
+# Minimum value: 7, maximum value: 10000, default value: 72.
+
+FORTRAN_COMMENT_AFTER = 72
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# multi-line macros, enums or list initialized variables directly into the
+# documentation.
+# The default value is: NO.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# entity all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see https://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS = YES
+
+# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the
+# clang parser (see:
+# http://clang.llvm.org/) for more accurate parsing at the cost of reduced
+# performance. This can be particularly helpful with template rich C++ code for
+# which doxygen's built-in parser lacks the necessary type information.
+# Note: The availability of this option depends on whether or not doxygen was
+# generated with the -Duse_libclang=ON option for CMake.
+# The default value is: NO.
+
+CLANG_ASSISTED_PARSING = NO
+
+# If the CLANG_ASSISTED_PARSING tag is set to YES and the CLANG_ADD_INC_PATHS
+# tag is set to YES then doxygen will add the directory of each input to the
+# include path.
+# The default value is: YES.
+# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
+
+CLANG_ADD_INC_PATHS = YES
+
+# If clang assisted parsing is enabled you can provide the compiler with command
+# line options that you would normally use when invoking the compiler. Note that
+# the include paths will already be set by doxygen for the files and directories
+# specified with INPUT and INCLUDE_PATH.
+# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
+
+CLANG_OPTIONS =
+
+# If clang assisted parsing is enabled you can provide the clang parser with the
+# path to the directory containing a file called compile_commands.json. This
+# file is the compilation database (see:
+# http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the
+# options used when the source files were built. This is equivalent to
+# specifying the -p option to a clang tool, such as clang-check. These options
+# will then be passed to the parser. Any options specified with CLANG_OPTIONS
+# will be added as well.
+# Note: The availability of this option depends on whether or not doxygen was
+# generated with the -Duse_libclang=ON option for CMake.
+
+CLANG_DATABASE_PATH =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX = YES
+
+# The IGNORE_PREFIX tag can be used to specify a prefix (or a list of prefixes)
+# that should be ignored while generating the index headers. The IGNORE_PREFIX
+# tag works for class, function and member names. The entity will be placed in
+# the alphabetical list under the first letter of the entity name that remains
+# after removing the prefix.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output.
+# The default value is: YES.
+
+GENERATE_HTML = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which depend on the configuration options used (e.g. the
+# setting GENERATE_TREEVIEW). It is highly recommended to start with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER =
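+
+# Example (not part of the stock template, shown commented out): after
+# generating the default files with
+#   doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# and editing new_header.html, the custom header and footer would be wired up
+# roughly like this; the file names are simply the ones from the command above.
+# HTML_HEADER = new_header.html
+# HTML_FOOTER = new_footer.html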
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# Note: Since the styling of scrollbars can currently not be overruled in
+# Webkit/Chromium, the styling will be left out of the default doxygen.css if
+# one or more extra stylesheets have been specified. So if scrollbar
+# customization is desired it has to be added explicitly. For an example see the
+# documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET =
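+
+# Example (not part of the stock template, shown commented out): extra style
+# sheets are applied in the order listed, later files overruling earlier ones;
+# "custom.css" is a hypothetical file name used purely for illustration.
+# HTML_EXTRA_STYLESHEET = custom.css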
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES =
+
+# The HTML_COLORSTYLE tag can be used to specify if the generated HTML output
+# should be rendered with a dark or light theme.
+# Possible values are: LIGHT always generate light mode output, DARK always
+# generate dark mode output, AUTO_LIGHT automatically set the mode according to
+# the user preference, use light mode if no preference is set (the default),
+# AUTO_DARK automatically set the mode according to the user preference, use
+# dark mode if no preference is set, and TOGGLE allows the user to switch between
+# light and dark mode via a button.
+# The default value is: AUTO_LIGHT.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE = AUTO_LIGHT
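+
+# Example (not part of the stock template, shown commented out): one of the
+# documented values above lets readers switch between light and dark mode
+# themselves.
+# HTML_COLORSTYLE = TOGGLE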
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the style sheet and background images according to
+# this color. Hue is specified as an angle on a color-wheel, see
+# https://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use gray-scales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA = 80
+
+# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
+# documentation will contain a main index with vertical navigation menus that
+# are dynamically created via JavaScript. If disabled, the navigation index will
+# consist of multiple levels of tabs that are statically embedded in every HTML
+# page. Disable this option to support browsers that do not have JavaScript,
+# like the Qt help browser.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_MENUS = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS = NO
+
+# If the HTML_CODE_FOLDING tag is set to YES then classes and functions can be
+# dynamically folded and expanded in the generated HTML source code.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_CODE_FOLDING = YES
+
+# If the HTML_COPY_CLIPBOARD tag is set to YES then doxygen will show an icon in
+# the top right corner of code and text fragments that allows the user to copy
+# its content to the clipboard. Note this only works if supported by the browser
+# and the web page is served via a secure context (see:
+# https://www.w3.org/TR/secure-contexts/), i.e. using the https: or file:
+# protocol.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COPY_CLIPBOARD = YES
+
+# Doxygen stores a couple of settings persistently in the browser (via e.g.
+# cookies). By default these settings apply to all HTML pages generated by
+# doxygen across all projects. The HTML_PROJECT_COOKIE tag can be used to store
+# the settings under a project specific key, such that the user preferences will
+# be stored separately.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_PROJECT_COOKIE =
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see:
+# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To
+# create a documentation set, doxygen will generate a Makefile in the HTML
+# output directory. Running make will produce the docset in that directory and
+# running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
+# genXcode/_index.html for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# This tag determines the URL of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDURL =
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# on Windows. In the beginning of 2021 Microsoft took the original page, with
+# among other things the download links, offline (the HTML Help Workshop had
+# already been in maintenance mode for many years). You can download the HTML
+# Help Workshop installation executable from the web archives (see:
+# http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo
+# ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe).
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION =
+
+# The GENERATE_CHI flag controls whether a separate .chi index file is generated
+# (YES) or included in the main .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND = NO
+
+# The SITEMAP_URL tag is used to specify the full URL of the place where the
+# generated documentation will be placed on the server by the user during the
+# deployment of the documentation. The generated sitemap is called sitemap.xml
+# and placed on the directory specified by HTML_OUTPUT. In case no SITEMAP_URL
+# is specified no sitemap is generated. For information about the sitemap
+# protocol see https://www.sitemaps.org
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SITEMAP_URL =
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS =
+
+# The QHG_LOCATION tag can be used to specify the location (absolute path
+# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to
+# run qhelpgenerator on the generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated, together with the HTML files, they form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files needs
+# to be copied into the plugins directory of eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine tune the look of the index (see "Fine-tuning the output"). As an
+# example, the default style sheet generated by doxygen has an example that
+# shows how to put an image at the root of the tree instead of the PROJECT_NAME.
+# Since the tree basically has the same information as the tab index, you could
+# consider setting DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW = NO
+
+# When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the
+# FULL_SIDEBAR option determines if the side bar is limited to only the treeview
+# area (value NO) or if it should extend to the full height of the window (value
+# YES). Setting this to YES gives a layout similar to
+# https://docs.readthedocs.io with more room for contents, but less room for the
+# project logo, title, and description. If either GENERATE_TREEVIEW or
+# DISABLE_INDEX is set to NO, this option has no effect.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FULL_SIDEBAR = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# If the OBFUSCATE_EMAILS tag is set to YES, doxygen will obfuscate email
+# addresses.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+OBFUSCATE_EMAILS = YES
+
+# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg
+# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
+# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
+# the HTML output. These images will generally look nicer at scaled resolutions.
+# Possible values are: png (the default) and svg (looks nicer but requires the
+# pdf2svg or inkscape tool).
+# The default value is: png.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FORMULA_FORMAT = png
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE = 10
+
+# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands
+# to create new LaTeX commands to be used in formulas as building blocks. See
+# the section "Including formulas" for details.
+
+FORMULA_MACROFILE =
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# https://www.mathjax.org) which uses client side JavaScript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# With MATHJAX_VERSION it is possible to specify the MathJax version to be used.
+# Note that the different versions of MathJax have different requirements with
+# regards to the different settings, so it is possible that also other MathJax
+# settings have to be changed when switching between the different MathJax
+# versions.
+# Possible values are: MathJax_2 and MathJax_3.
+# The default value is: MathJax_2.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_VERSION = MathJax_2
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. For more details about the output format see MathJax
+# version 2 (see:
+# http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3
+# (see:
+# http://docs.mathjax.org/en/latest/web/components/output.html).
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility. This is the name for MathJax version 2, for MathJax version 3
+# this will be translated into chtml), NativeMML (i.e. MathML. Only supported
+# for MathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This
+# is the name for MathJax version 3, for MathJax version 2 this will be
+# translated into HTML-CSS) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from https://www.mathjax.org before deployment. The default value is:
+# - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2
+# - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH =
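+
+# Example (not part of the stock template, shown commented out): pointing at a
+# local MathJax copy installed next to the HTML output directory, as described
+# above.
+# MATHJAX_RELPATH = ../mathjax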
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# for MathJax version 2 (see
+# https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions):
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# For example for MathJax version 3 (see
+# http://docs.mathjax.org/en/latest/input/tex/extensions/index.html):
+# MATHJAX_EXTENSIONS = ams
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see:
+# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using JavaScript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see:
+# https://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see:
+# https://xapian.org/). See the section "External Indexing and Searching" for
+# details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL =
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# to a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS =
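+
+# Example (not part of the stock template, shown commented out): a rough sketch
+# of a server based search setup using the doxysearch.cgi shipped with doxygen;
+# the URL is a placeholder for wherever the CGI would actually be hosted.
+# SERVER_BASED_SEARCH = YES
+# EXTERNAL_SEARCH     = YES
+# SEARCHENGINE_URL    = https://example.org/cgi-bin/doxysearch.cgi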
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when not enabling USE_PDFLATEX the default is latex, and when
+# enabling USE_PDFLATEX the default is pdflatex; if in the latter case latex is
+# chosen, it is overwritten by pdflatex. For specific output languages the
+# default can have been set differently; this depends on the implementation of
+# the output language.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME =
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# Note: This tag is used in the Makefile / make.bat.
+# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file
+# (.tex).
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to
+# generate index for LaTeX. In case there is no backslash (\) as first character
+# it will be automatically added in the LaTeX code.
+# Note: This tag is used in the generated output file (.tex).
+# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat.
+# The default value is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_MAKEINDEX_CMD = makeindex
+
+# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE = a4
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. The package can be specified just
+# by its name or with the correct syntax as to be used with the LaTeX
+# \usepackage command. To get the times font for instance you can specify :
+# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
+# To use the option intlimits with the amsmath package you can specify:
+# EXTRA_PACKAGES=[intlimits]{amsmath}
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a user-defined LaTeX header for
+# the generated LaTeX document. The header should contain everything until the
+# first chapter. If it is left blank doxygen will generate a standard header. It
+# is highly recommended to start with a default header using
+# doxygen -w latex new_header.tex new_footer.tex new_stylesheet.sty
+# and then modify the file new_header.tex. See also section "Doxygen usage" for
+# information on how to generate the default header that doxygen normally uses.
+#
+# Note: Only use a user-defined header if you know what you are doing!
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a
+# description of the commands, markers and block names that have a special
+# meaning inside the header (and footer) see the documentation.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER =
+
+# The LATEX_FOOTER tag can be used to specify a user-defined LaTeX footer for
+# the generated LaTeX document. The footer should contain everything after the
+# last chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer. See also section "Doxygen
+# usage" for information on how to generate the default footer that doxygen
+# normally uses. Note: Only use a user-defined footer if you know what you are
+# doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER =
+
+# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# LaTeX style sheets that are included after the standard style sheets created
+# by doxygen. Using this option one can overrule certain style aspects. Doxygen
+# will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_STYLESHEET =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS = YES
+
+# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as
+# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX
+# files. Set this option to YES to get higher quality PDF documentation.
+#
+# See also section LATEX_CMD_NAME for selecting the engine.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX = YES
+
+# The LATEX_BATCHMODE tag signals the behavior of LaTeX in case of an error.
+# Possible values are: NO same as ERROR_STOP, YES same as BATCH, BATCH In batch
+# mode nothing is printed on the terminal, errors are scrolled as if <return> is
+# hit at every error; missing files that TeX tries to input or request from
+# keyboard input (\read on a not open input stream) cause the job to abort,
+# NON_STOP In nonstop mode the diagnostic message will appear on the terminal,
+# but there is no possibility of user interaction just like in batch mode,
+# SCROLL In scroll mode, TeX will stop only for missing files to input or if
+# keyboard input is necessary and ERROR_STOP In errorstop mode, TeX will stop at
+# each error, asking for user intervention.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# https://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE = plain
+
+# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute)
+# path from which the emoji images will be read. If a relative path is entered,
+# it will be relative to the LATEX_OUTPUT directory. If left blank the
+# LATEX_OUTPUT directory will be used.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EMOJI_DIRECTORY =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's
+# configuration file, i.e. a series of assignments. You only have to provide
+# replacements, missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's configuration file. A template extensions file can be
+# generated using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN = YES
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION = .3
+
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR =
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS = NO
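+
+# Usage note (not part of the stock template): with GENERATE_MAN enabled above,
+# the pages end up in a man3 directory under MAN_OUTPUT, so after a run they
+# can typically be previewed with something like
+#   man -M <output-dir>/man 3 <page-name>
+# where <output-dir> is whatever OUTPUT_DIRECTORY resolves to.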
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT = xml
+
+# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING = YES
+
+# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include
+# namespace members in file scope as well, matching the HTML output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_NS_MEMB_FILE_SCOPE = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT = docbook
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
+# AutoGen Definitions (see https://autogen.sourceforge.net/) file that captures
+# the structure of the code including all documentation. Note that this feature
+# is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to Sqlite3 output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_SQLITE3 tag is set to YES doxygen will generate a Sqlite3
+# database with symbols found by doxygen stored in tables.
+# The default value is: NO.
+
+GENERATE_SQLITE3 = NO
+
+# The SQLITE3_OUTPUT tag is used to specify where the Sqlite3 database will be
+# put. If a relative path is entered the value of OUTPUT_DIRECTORY will be put
+# in front of it.
+# The default directory is: sqlite3.
+# This tag requires that the tag GENERATE_SQLITE3 is set to YES.
+
+SQLITE3_OUTPUT = sqlite3
+
+# If the SQLITE3_RECREATE_DB tag is set to YES, the existing doxygen_sqlite3.db
+# database file will be recreated with each doxygen run. If set to NO, doxygen
+# will warn if a database file is already found and not modify it.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_SQLITE3 is set to YES.
+
+SQLITE3_RECREATE_DB = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO, the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
+# in the source code. If set to NO, only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES, the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor. Note that the INCLUDE_PATH is not recursive, so the setting of
+# RECURSIVE has no effect here.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED =
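+
+# Example (not part of the stock template, shown commented out): for a C
+# library a common use is pre-defining an export/attribute macro so that the
+# preprocessor does not choke on it; LOCUSTS_API is a hypothetical macro name
+# used only for illustration.
+# PREDEFINED = LOCUSTS_API=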
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES =
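+
+# Example (not part of the stock template, shown commented out): linking
+# against another project's documentation via its tag file, using the
+# "file=location" form described above; both the tag file name and the URL are
+# placeholders.
+# TAGFILES = otherlib.tag=https://example.org/otherlib/docs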
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES, all external classes and namespaces
+# will be listed in the class and namespace index. If set to NO, only the
+# inherited external classes will be listed.
+# The default value is: NO.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
+# in the topic index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS = YES
+
+# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to diagram generator tools
+#---------------------------------------------------------------------------
+
+# If set to YES the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# https://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO
+# The default value is: NO.
+
+HAVE_DOT = NO
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS = 0
+
+# DOT_COMMON_ATTR specifies common attributes for nodes, edges and labels of
+# subgraphs. When you want a differently looking font in the dot files that
+# doxygen generates you can specify fontname, fontcolor and fontsize attributes.
+# For details please see <a href=https://graphviz.org/doc/info/attrs.html>Node,
+# Edge and Graph Attributes specification</a>. You need to make sure dot is able
+# to find the font, which can be done by putting it in a standard location or by
+# setting the DOTFONTPATH environment variable or by setting DOT_FONTPATH to the
+# directory containing the font. Default graphviz fontsize is 14.
+# The default value is: fontname=Helvetica,fontsize=10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_COMMON_ATTR = "fontname=Helvetica,fontsize=10"
+
+# DOT_EDGE_ATTR is concatenated with DOT_COMMON_ATTR. For elegant style you can
+# add 'arrowhead=open, arrowtail=open, arrowsize=0.5'. <a
+# href=https://graphviz.org/doc/info/arrows.html>Complete documentation about
+# arrow shapes.</a>
+# The default value is: labelfontname=Helvetica,labelfontsize=10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_EDGE_ATTR = "labelfontname=Helvetica,labelfontsize=10"
+
+# DOT_NODE_ATTR is concatenated with DOT_COMMON_ATTR. For a view without boxes
+# around nodes, set 'shape=plain' or 'shape=plaintext'. See the <a
+# href=https://www.graphviz.org/doc/info/shapes.html>Shapes specification</a>.
+# The default value is: shape=box,height=0.2,width=0.4.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4"
+
+# You can set the path where dot can find font specified with fontname in
+# DOT_COMMON_ATTR and others dot attributes.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH tag is set to YES or GRAPH or BUILTIN then doxygen will
+# generate a graph for each documented class showing the direct and indirect
+# inheritance relations. In case the CLASS_GRAPH tag is set to YES or GRAPH and
+# HAVE_DOT is enabled as well, then dot will be used to draw the graph. In case
+# the CLASS_GRAPH tag is set to YES and HAVE_DOT is disabled or if the
+# CLASS_GRAPH tag is set to BUILTIN, then the built-in generator will be used.
+# If the CLASS_GRAPH tag is set to TEXT the direct and indirect inheritance
+# relations will be shown as texts / links. Explicit enabling an inheritance
+# graph or choosing a different representation for an inheritance graph of a
+# specific class, can be accomplished by means of the command \inheritancegraph.
+# Disabling an inheritance graph can be accomplished by means of the command
+# \hideinheritancegraph.
+# Possible values are: NO, YES, TEXT, GRAPH and BUILTIN.
+# The default value is: YES.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes. Explicit enabling a collaboration graph,
+# when COLLABORATION_GRAPH is set to NO, can be accomplished by means of the
+# command \collaborationgraph. Disabling a collaboration graph can be
+# accomplished by means of the command \hidecollaborationgraph.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies. Explicit enabling a group
+# dependency graph, when GROUP_GRAPHS is set to NO, can be accomplished by means
+# of the command \groupgraph. Disabling a directory graph can be accomplished by
+# means of the command \hidegroupgraph. See also the chapter Grouping in the
+# manual.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag UML_LOOK is set to YES.
+
+UML_LIMIT_NUM_FIELDS = 10
+
+# If the DOT_UML_DETAILS tag is set to NO, doxygen will show attributes and
+# methods without types and arguments in the UML graphs. If the DOT_UML_DETAILS
+# tag is set to YES, doxygen will add type and arguments for attributes and
+# methods in the UML graphs. If the DOT_UML_DETAILS tag is set to NONE, doxygen
+# will not generate fields with class member information in the UML graphs. The
+# class diagrams will look similar to the default class diagrams but using UML
+# notation for the relationships.
+# Possible values are: NO, YES and NONE.
+# The default value is: NO.
+# This tag requires that the tag UML_LOOK is set to YES.
+
+DOT_UML_DETAILS = NO
+
+# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters
+# to display on a single line. If the actual line length exceeds this threshold
+# significantly it will be wrapped across multiple lines. Some heuristics are
+# applied to avoid ugly line breaks.
+# Minimum value: 0, maximum value: 1000, default value: 17.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_WRAP_THRESHOLD = 17
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files. Explicit enabling an include graph, when INCLUDE_GRAPH is set to NO,
+# can be accomplished by means of the command \includegraph. Disabling an
+# include graph can be accomplished by means of the command \hideincludegraph.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files. Explicit enabling an included by graph, when INCLUDED_BY_GRAPH is set
+# to NO, can be accomplished by means of the command \includedbygraph. Disabling
+# an included by graph can be accomplished by means of the command
+# \hideincludedbygraph.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command. Disabling a call graph can be
+# accomplished by means of the command \hidecallgraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH = NO
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command. Disabling a caller graph can be
+# accomplished by means of the command \hidecallergraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH = NO
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories. Explicit enabling a directory graph, when
+# DIRECTORY_GRAPH is set to NO, can be accomplished by means of the command
+# \directorygraph. Disabling a directory graph can be accomplished by means of
+# the command \hidedirectorygraph.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH = YES
+
+# The DIR_GRAPH_MAX_DEPTH tag can be used to limit the maximum number of levels
+# of child directories generated in directory dependency graphs by dot.
+# Minimum value: 1, maximum value: 25, default value: 1.
+# This tag requires that the tag DIRECTORY_GRAPH is set to YES.
+
+DIR_GRAPH_MAX_DEPTH = 1
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. For an explanation of the image formats see the section
+# output formats in the documentation of the dot tool (Graphviz (see:
+# https://www.graphviz.org/)).
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
+# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
+# png:gdiplus:gdiplus.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS =
+
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file or to the filename of the jar
+# file to be used. If left blank, it is assumed PlantUML is not used or called during
+# a preprocessing step. Doxygen will generate a warning when it encounters a
+# \startuml command in this case and will not generate output for the diagram.
+
+PLANTUML_JAR_PATH =
+
+# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
+# configuration file for plantuml.
+
+PLANTUML_CFG_FILE =
+
+# When using plantuml, the specified paths are searched for files specified by
+# the !include statement in a plantuml block.
+
+PLANTUML_INCLUDE_PATH =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lay
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS = NO
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# Note: This tag requires that UML_LOOK isn't set, i.e. the doxygen internal
+# graphical representation for inheritance and collaboration diagrams is used.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate
+# files that are used to generate the various graphs.
+#
+# Note: This setting is not only used for dot files but also for msc temporary
+# files.
+# The default value is: YES.
+
+DOT_CLEANUP = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. If the MSCGEN_TOOL tag is left empty (the default), then doxygen will
+# use a built-in version of the mscgen tool to produce the charts. Alternatively,
+# the MSCGEN_TOOL tag can also specify the name of an external tool. For instance,
+# specifying prog as the value, doxygen will call the tool as prog -T
+# <outfile_format> -o <outputfile> <inputfile>. The external tool should support
+# output file formats "png", "eps", "svg", and "ismap".
+
+MSCGEN_TOOL =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS =
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..65638b2
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,12 @@
+Copyright (c) 2024 Matt Strapp <matt+locusts@mattstrapp.net>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c5e2094
--- /dev/null
+++ b/README.md
@@ -0,0 +1,43 @@
+# liboCusts
+
+## Description
+liboCusts is a library that provides a way to introduce locusts into your code.
+
+Why would you want to do that? I don't know.
+
+## Example
+```c
+#include "locusts.h"
+int main(void) {
+ introduce_locusts();
+ /* Locusts are now in your codebase! */
+ return 0;
+}
+```
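+
+To compile the example above (saved as `example.c`, a name used here only for
+illustration) against an installed copy of the library, something along these
+lines should work after following the build instructions below. This is a
+sketch that assumes a pkg-config file named `ocusts` is installed; otherwise
+substitute explicit `-I`, `-L`, and `-l` flags for your install prefix:
+
+```sh
+cc -o example example.c $(pkg-config --cflags --libs ocusts)
+./example
+```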
+
+## Building
+Multiple build systems are supported.
+
+### Meson
+```sh
+meson setup builddir --buildtype=release
+meson compile -C builddir
+```
+
+### CMake
+```sh
+cmake -S . -B build
+cmake --build build
+```
+
+### Autoconf
+```sh
+autoreconf -fi
+./configure
+make
+```
+
+
+## License
+This project is licensed under the BSD Zero Clause License. See the [LICENSE](LICENSE) file for details.
diff --git a/build-aux/ar-lib b/build-aux/ar-lib
new file mode 100755
index 0000000..c349042
--- /dev/null
+++ b/build-aux/ar-lib
@@ -0,0 +1,271 @@
+#! /bin/sh
+# Wrapper for Microsoft lib.exe
+
+me=ar-lib
+scriptversion=2019-07-04.01; # UTC
+
+# Copyright (C) 2010-2021 Free Software Foundation, Inc.
+# Written by Peter Rosin <peda@lysator.liu.se>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+
+# func_error message
+func_error ()
+{
+ echo "$me: $1" 1>&2
+ exit 1
+}
+
+file_conv=
+
+# func_file_conv build_file
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Windows hosts.
+func_file_conv ()
+{
+ file=$1
+ case $file in
+ / | /[!/]*) # absolute file, and not a UNC file
+ if test -z "$file_conv"; then
+ # lazily determine how to convert abs files
+ case `uname -s` in
+ MINGW*)
+ file_conv=mingw
+ ;;
+ CYGWIN* | MSYS*)
+ file_conv=cygwin
+ ;;
+ *)
+ file_conv=wine
+ ;;
+ esac
+ fi
+ case $file_conv in
+ mingw)
+ file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+ ;;
+ cygwin | msys)
+ file=`cygpath -m "$file" || echo "$file"`
+ ;;
+ wine)
+ file=`winepath -w "$file" || echo "$file"`
+ ;;
+ esac
+ ;;
+ esac
+}
+
+# func_at_file at_file operation archive
+# Iterate over all members in AT_FILE performing OPERATION on ARCHIVE
+# for each of them.
+# When interpreting the content of the @FILE, do NOT use func_file_conv,
+# since the user would need to supply preconverted file names to
+# binutils ar, at least for MinGW.
+func_at_file ()
+{
+ operation=$2
+ archive=$3
+ at_file_contents=`cat "$1"`
+ eval set x "$at_file_contents"
+ shift
+
+ for member
+ do
+ $AR -NOLOGO $operation:"$member" "$archive" || exit $?
+ done
+}
+
+case $1 in
+ '')
+ func_error "no command. Try '$0 --help' for more information."
+ ;;
+ -h | --h*)
+ cat <<EOF
+Usage: $me [--help] [--version] PROGRAM ACTION ARCHIVE [MEMBER...]
+
+Members may be specified in a file named with @FILE.
+EOF
+ exit $?
+ ;;
+ -v | --v*)
+ echo "$me, version $scriptversion"
+ exit $?
+ ;;
+esac
+
+if test $# -lt 3; then
+ func_error "you must specify a program, an action and an archive"
+fi
+
+AR=$1
+shift
+while :
+do
+ if test $# -lt 2; then
+ func_error "you must specify a program, an action and an archive"
+ fi
+ case $1 in
+ -lib | -LIB \
+ | -ltcg | -LTCG \
+ | -machine* | -MACHINE* \
+ | -subsystem* | -SUBSYSTEM* \
+ | -verbose | -VERBOSE \
+ | -wx* | -WX* )
+ AR="$AR $1"
+ shift
+ ;;
+ *)
+ action=$1
+ shift
+ break
+ ;;
+ esac
+done
+orig_archive=$1
+shift
+func_file_conv "$orig_archive"
+archive=$file
+
+# strip leading dash in $action
+action=${action#-}
+
+delete=
+extract=
+list=
+quick=
+replace=
+index=
+create=
+
+while test -n "$action"
+do
+ case $action in
+ d*) delete=yes ;;
+ x*) extract=yes ;;
+ t*) list=yes ;;
+ q*) quick=yes ;;
+ r*) replace=yes ;;
+ s*) index=yes ;;
+ S*) ;; # the index is always updated implicitly
+ c*) create=yes ;;
+ u*) ;; # TODO: don't ignore the update modifier
+ v*) ;; # TODO: don't ignore the verbose modifier
+ *)
+ func_error "unknown action specified"
+ ;;
+ esac
+ action=${action#?}
+done
+
+case $delete$extract$list$quick$replace,$index in
+ yes,* | ,yes)
+ ;;
+ yesyes*)
+ func_error "more than one action specified"
+ ;;
+ *)
+ func_error "no action specified"
+ ;;
+esac
+
+if test -n "$delete"; then
+ if test ! -f "$orig_archive"; then
+ func_error "archive not found"
+ fi
+ for member
+ do
+ case $1 in
+ @*)
+ func_at_file "${1#@}" -REMOVE "$archive"
+ ;;
+ *)
+ func_file_conv "$1"
+ $AR -NOLOGO -REMOVE:"$file" "$archive" || exit $?
+ ;;
+ esac
+ done
+
+elif test -n "$extract"; then
+ if test ! -f "$orig_archive"; then
+ func_error "archive not found"
+ fi
+ if test $# -gt 0; then
+ for member
+ do
+ case $1 in
+ @*)
+ func_at_file "${1#@}" -EXTRACT "$archive"
+ ;;
+ *)
+ func_file_conv "$1"
+ $AR -NOLOGO -EXTRACT:"$file" "$archive" || exit $?
+ ;;
+ esac
+ done
+ else
+ $AR -NOLOGO -LIST "$archive" | tr -d '\r' | sed -e 's/\\/\\\\/g' \
+ | while read member
+ do
+ $AR -NOLOGO -EXTRACT:"$member" "$archive" || exit $?
+ done
+ fi
+
+elif test -n "$quick$replace"; then
+ if test ! -f "$orig_archive"; then
+ if test -z "$create"; then
+ echo "$me: creating $orig_archive"
+ fi
+ orig_archive=
+ else
+ orig_archive=$archive
+ fi
+
+ for member
+ do
+ case $1 in
+ @*)
+ func_file_conv "${1#@}"
+ set x "$@" "@$file"
+ ;;
+ *)
+ func_file_conv "$1"
+ set x "$@" "$file"
+ ;;
+ esac
+ shift
+ shift
+ done
+
+ if test -n "$orig_archive"; then
+ $AR -NOLOGO -OUT:"$archive" "$orig_archive" "$@" || exit $?
+ else
+ $AR -NOLOGO -OUT:"$archive" "$@" || exit $?
+ fi
+
+elif test -n "$list"; then
+ if test ! -f "$orig_archive"; then
+ func_error "archive not found"
+ fi
+ $AR -NOLOGO -LIST "$archive" || exit $?
+fi
diff --git a/build-aux/compile b/build-aux/compile
new file mode 100755
index 0000000..df363c8
--- /dev/null
+++ b/build-aux/compile
@@ -0,0 +1,348 @@
+#! /bin/sh
+# Wrapper for compilers which do not understand '-c -o'.
+
+scriptversion=2018-03-07.03; # UTC
+
+# Copyright (C) 1999-2021 Free Software Foundation, Inc.
+# Written by Tom Tromey <tromey@cygnus.com>.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <bug-automake@gnu.org> or send patches to
+# <automake-patches@gnu.org>.
+
+nl='
+'
+
+# We need space, tab and new line, in precisely that order. Quoting is
+# there to prevent tools from complaining about whitespace usage.
+IFS=" "" $nl"
+
+file_conv=
+
+# func_file_conv build_file lazy
+# Convert a $build file to $host form and store it in $file
+# Currently only supports Windows hosts. If the determined conversion
+# type is listed in (the comma separated) LAZY, no conversion will
+# take place.
+func_file_conv ()
+{
+ file=$1
+ case $file in
+ / | /[!/]*) # absolute file, and not a UNC file
+ if test -z "$file_conv"; then
+ # lazily determine how to convert abs files
+ case `uname -s` in
+ MINGW*)
+ file_conv=mingw
+ ;;
+ CYGWIN* | MSYS*)
+ file_conv=cygwin
+ ;;
+ *)
+ file_conv=wine
+ ;;
+ esac
+ fi
+ case $file_conv/,$2, in
+ *,$file_conv,*)
+ ;;
+ mingw/*)
+ file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
+ ;;
+ cygwin/* | msys/*)
+ file=`cygpath -m "$file" || echo "$file"`
+ ;;
+ wine/*)
+ file=`winepath -w "$file" || echo "$file"`
+ ;;
+ esac
+ ;;
+ esac
+}
+
+# func_cl_dashL linkdir
+# Make cl look for libraries in LINKDIR
+func_cl_dashL ()
+{
+ func_file_conv "$1"
+ if test -z "$lib_path"; then
+ lib_path=$file
+ else
+ lib_path="$lib_path;$file"
+ fi
+ linker_opts="$linker_opts -LIBPATH:$file"
+}
+
+# func_cl_dashl library
+# Do a library search-path lookup for cl
+func_cl_dashl ()
+{
+ lib=$1
+ found=no
+ save_IFS=$IFS
+ IFS=';'
+ for dir in $lib_path $LIB
+ do
+ IFS=$save_IFS
+ if $shared && test -f "$dir/$lib.dll.lib"; then
+ found=yes
+ lib=$dir/$lib.dll.lib
+ break
+ fi
+ if test -f "$dir/$lib.lib"; then
+ found=yes
+ lib=$dir/$lib.lib
+ break
+ fi
+ if test -f "$dir/lib$lib.a"; then
+ found=yes
+ lib=$dir/lib$lib.a
+ break
+ fi
+ done
+ IFS=$save_IFS
+
+ if test "$found" != yes; then
+ lib=$lib.lib
+ fi
+}
+
+# func_cl_wrapper cl arg...
+# Adjust compile command to suit cl
+func_cl_wrapper ()
+{
+ # Assume a capable shell
+ lib_path=
+ shared=:
+ linker_opts=
+ for arg
+ do
+ if test -n "$eat"; then
+ eat=
+ else
+ case $1 in
+ -o)
+ # configure might choose to run compile as 'compile cc -o foo foo.c'.
+ eat=1
+ case $2 in
+ *.o | *.[oO][bB][jJ])
+ func_file_conv "$2"
+ set x "$@" -Fo"$file"
+ shift
+ ;;
+ *)
+ func_file_conv "$2"
+ set x "$@" -Fe"$file"
+ shift
+ ;;
+ esac
+ ;;
+ -I)
+ eat=1
+ func_file_conv "$2" mingw
+ set x "$@" -I"$file"
+ shift
+ ;;
+ -I*)
+ func_file_conv "${1#-I}" mingw
+ set x "$@" -I"$file"
+ shift
+ ;;
+ -l)
+ eat=1
+ func_cl_dashl "$2"
+ set x "$@" "$lib"
+ shift
+ ;;
+ -l*)
+ func_cl_dashl "${1#-l}"
+ set x "$@" "$lib"
+ shift
+ ;;
+ -L)
+ eat=1
+ func_cl_dashL "$2"
+ ;;
+ -L*)
+ func_cl_dashL "${1#-L}"
+ ;;
+ -static)
+ shared=false
+ ;;
+ -Wl,*)
+ arg=${1#-Wl,}
+ save_ifs="$IFS"; IFS=','
+ for flag in $arg; do
+ IFS="$save_ifs"
+ linker_opts="$linker_opts $flag"
+ done
+ IFS="$save_ifs"
+ ;;
+ -Xlinker)
+ eat=1
+ linker_opts="$linker_opts $2"
+ ;;
+ -*)
+ set x "$@" "$1"
+ shift
+ ;;
+ *.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
+ func_file_conv "$1"
+ set x "$@" -Tp"$file"
+ shift
+ ;;
+ *.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
+ func_file_conv "$1" mingw
+ set x "$@" "$file"
+ shift
+ ;;
+ *)
+ set x "$@" "$1"
+ shift
+ ;;
+ esac
+ fi
+ shift
+ done
+ if test -n "$linker_opts"; then
+ linker_opts="-link$linker_opts"
+ fi
+ exec "$@" $linker_opts
+ exit 1
+}
+
+eat=
+
+case $1 in
+ '')
+ echo "$0: No command. Try '$0 --help' for more information." 1>&2
+ exit 1;
+ ;;
+ -h | --h*)
+ cat <<\EOF
+Usage: compile [--help] [--version] PROGRAM [ARGS]
+
+Wrapper for compilers which do not understand '-c -o'.
+Remove '-o dest.o' from ARGS, run PROGRAM with the remaining
+arguments, and rename the output as expected.
+
+If you are trying to build a whole package this is not the
+right script to run: please start by reading the file 'INSTALL'.
+
+Report bugs to <bug-automake@gnu.org>.
+EOF
+ exit $?
+ ;;
+ -v | --v*)
+ echo "compile $scriptversion"
+ exit $?
+ ;;
+ cl | *[/\\]cl | cl.exe | *[/\\]cl.exe | \
+ icl | *[/\\]icl | icl.exe | *[/\\]icl.exe )
+ func_cl_wrapper "$@" # Doesn't return...
+ ;;
+esac
+
+ofile=
+cfile=
+
+for arg
+do
+ if test -n "$eat"; then
+ eat=
+ else
+ case $1 in
+ -o)
+ # configure might choose to run compile as 'compile cc -o foo foo.c'.
+ # So we strip '-o arg' only if arg is an object.
+ eat=1
+ case $2 in
+ *.o | *.obj)
+ ofile=$2
+ ;;
+ *)
+ set x "$@" -o "$2"
+ shift
+ ;;
+ esac
+ ;;
+ *.c)
+ cfile=$1
+ set x "$@" "$1"
+ shift
+ ;;
+ *)
+ set x "$@" "$1"
+ shift
+ ;;
+ esac
+ fi
+ shift
+done
+
+if test -z "$ofile" || test -z "$cfile"; then
+ # If no '-o' option was seen then we might have been invoked from a
+ # pattern rule where we don't need one. That is ok -- this is a
+ # normal compilation that the losing compiler can handle. If no
+ # '.c' file was seen then we are probably linking. That is also
+ # ok.
+ exec "$@"
+fi
+
+# Name of file we expect compiler to create.
+cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
+
+# Create the lock directory.
+# Note: use '[/\\:.-]' here to ensure that we don't use the same name
+# that we are using for the .o file. Also, base the name on the expected
+# object file name, since that is what matters with a parallel build.
+lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
+while true; do
+ if mkdir "$lockdir" >/dev/null 2>&1; then
+ break
+ fi
+ sleep 1
+done
+# FIXME: race condition here if user kills between mkdir and trap.
+trap "rmdir '$lockdir'; exit 1" 1 2 15
+
+# Run the compile.
+"$@"
+ret=$?
+
+if test -f "$cofile"; then
+ test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
+elif test -f "${cofile}bj"; then
+ test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
+fi
+
+rmdir "$lockdir"
+exit $ret
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC0"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/build-aux/config.guess b/build-aux/config.guess
new file mode 100755
index 0000000..cdfc439
--- /dev/null
+++ b/build-aux/config.guess
@@ -0,0 +1,1807 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+# Copyright 1992-2023 Free Software Foundation, Inc.
+
+# shellcheck disable=SC2006,SC2268 # see below for rationale
+
+timestamp='2023-08-22'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program. This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+#
+# Originally written by Per Bothner; maintained since 2000 by Ben Elliston.
+#
+# You can get the latest version of this script from:
+# https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
+#
+# Please send patches to <config-patches@gnu.org>.
+
+
+# The "shellcheck disable" line above the timestamp inhibits complaints
+# about features and limitations of the classic Bourne shell that were
+# superseded or lifted in POSIX. However, this script identifies a wide
+# variety of pre-POSIX systems that do not have POSIX shells at all, and
+# even some reasonably current systems (Solaris 10 as case-in-point) still
+# have a pre-POSIX /bin/sh.
+
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system '$me' is run on.
+
+Options:
+ -h, --help print this help, then exit
+ -t, --time-stamp print date of last modification, then exit
+ -v, --version print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright 1992-2023 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try '$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case $1 in
+ --time-stamp | --time* | -t )
+ echo "$timestamp" ; exit ;;
+ --version | -v )
+ echo "$version" ; exit ;;
+ --help | --h* | -h )
+ echo "$usage"; exit ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ echo "$me: invalid option $1$help" >&2
+ exit 1 ;;
+ * )
+ break ;;
+ esac
+done
+
+if test $# != 0; then
+ echo "$me: too many arguments$help" >&2
+ exit 1
+fi
+
+# Just in case it came from the environment.
+GUESS=
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, 'CC_FOR_BUILD' used to be named 'HOST_CC'. We still
+# use 'HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+tmp=
+# shellcheck disable=SC2172
+trap 'test -z "$tmp" || rm -fr "$tmp"' 0 1 2 13 15
+
+set_cc_for_build() {
+ # prevent multiple calls if $tmp is already set
+ test "$tmp" && return 0
+ : "${TMPDIR=/tmp}"
+ # shellcheck disable=SC2039,SC3028
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir "$tmp" 2>/dev/null) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir "$tmp" 2>/dev/null) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; }
+ dummy=$tmp/dummy
+ case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in
+ ,,) echo "int x;" > "$dummy.c"
+ for driver in cc gcc c89 c99 ; do
+ if ($driver -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then
+ CC_FOR_BUILD=$driver
+ break
+ fi
+ done
+ if test x"$CC_FOR_BUILD" = x ; then
+ CC_FOR_BUILD=no_compiler_found
+ fi
+ ;;
+ ,,*) CC_FOR_BUILD=$CC ;;
+ ,*,*) CC_FOR_BUILD=$HOST_CC ;;
+ esac
+}
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if test -f /.attbin/uname ; then
+ PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+case $UNAME_SYSTEM in
+Linux|GNU|GNU/*)
+ LIBC=unknown
+
+ set_cc_for_build
+ cat <<-EOF > "$dummy.c"
+ #if defined(__ANDROID__)
+ LIBC=android
+ #else
+ #include <features.h>
+ #if defined(__UCLIBC__)
+ LIBC=uclibc
+ #elif defined(__dietlibc__)
+ LIBC=dietlibc
+ #elif defined(__GLIBC__)
+ LIBC=gnu
+ #else
+ #include <stdarg.h>
+ /* First heuristic to detect musl libc. */
+ #ifdef __DEFINED_va_list
+ LIBC=musl
+ #endif
+ #endif
+ #endif
+ EOF
+ cc_set_libc=`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g'`
+ eval "$cc_set_libc"
+
+ # Second heuristic to detect musl libc.
+ if [ "$LIBC" = unknown ] &&
+ command -v ldd >/dev/null &&
+ ldd --version 2>&1 | grep -q ^musl; then
+ LIBC=musl
+ fi
+
+ # If the system lacks a compiler, then just pick glibc.
+ # We could probably try harder.
+ if [ "$LIBC" = unknown ]; then
+ LIBC=gnu
+ fi
+ ;;
+esac
+
+# Note: order is significant - the case branches are not exclusive.
+
+case $UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION in
+ *:NetBSD:*:*)
+ # NetBSD (nbsd) targets should (where applicable) match one or
+ # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+ # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
+ # switched to ELF, *-*-netbsd* would select the old
+ # object file format. This provides both forward
+ # compatibility and a consistent mechanism for selecting the
+ # object file format.
+ #
+ # Note: NetBSD doesn't particularly care about the vendor
+ # portion of the name. We always set it to "unknown".
+ UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \
+ /sbin/sysctl -n hw.machine_arch 2>/dev/null || \
+ /usr/sbin/sysctl -n hw.machine_arch 2>/dev/null || \
+ echo unknown)`
+ case $UNAME_MACHINE_ARCH in
+ aarch64eb) machine=aarch64_be-unknown ;;
+ armeb) machine=armeb-unknown ;;
+ arm*) machine=arm-unknown ;;
+ sh3el) machine=shl-unknown ;;
+ sh3eb) machine=sh-unknown ;;
+ sh5el) machine=sh5le-unknown ;;
+ earmv*)
+ arch=`echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,'`
+ endian=`echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p'`
+ machine=${arch}${endian}-unknown
+ ;;
+ *) machine=$UNAME_MACHINE_ARCH-unknown ;;
+ esac
+ # The Operating System including object format, if it has switched
+ # to ELF recently (or will in the future) and ABI.
+ case $UNAME_MACHINE_ARCH in
+ earm*)
+ os=netbsdelf
+ ;;
+ arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+ set_cc_for_build
+ if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ELF__
+ then
+ # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+ # Return netbsd for either. FIX?
+ os=netbsd
+ else
+ os=netbsdelf
+ fi
+ ;;
+ *)
+ os=netbsd
+ ;;
+ esac
+ # Determine ABI tags.
+ case $UNAME_MACHINE_ARCH in
+ earm*)
+ expr='s/^earmv[0-9]/-eabi/;s/eb$//'
+ abi=`echo "$UNAME_MACHINE_ARCH" | sed -e "$expr"`
+ ;;
+ esac
+ # The OS release
+ # Debian GNU/NetBSD machines have a different userland, and
+ # thus, need a distinct triplet. However, they do not need
+ # kernel version information, so it can be replaced with a
+ # suitable tag, in the style of linux-gnu.
+ case $UNAME_VERSION in
+ Debian*)
+ release='-gnu'
+ ;;
+ *)
+ release=`echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2`
+ ;;
+ esac
+ # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+ # contains redundant information, the shorter form:
+ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+ GUESS=$machine-${os}${release}${abi-}
+ ;;
+ *:Bitrig:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'`
+ GUESS=$UNAME_MACHINE_ARCH-unknown-bitrig$UNAME_RELEASE
+ ;;
+ *:OpenBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+ GUESS=$UNAME_MACHINE_ARCH-unknown-openbsd$UNAME_RELEASE
+ ;;
+ *:SecBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/SecBSD.//'`
+ GUESS=$UNAME_MACHINE_ARCH-unknown-secbsd$UNAME_RELEASE
+ ;;
+ *:LibertyBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'`
+ GUESS=$UNAME_MACHINE_ARCH-unknown-libertybsd$UNAME_RELEASE
+ ;;
+ *:MidnightBSD:*:*)
+ GUESS=$UNAME_MACHINE-unknown-midnightbsd$UNAME_RELEASE
+ ;;
+ *:ekkoBSD:*:*)
+ GUESS=$UNAME_MACHINE-unknown-ekkobsd$UNAME_RELEASE
+ ;;
+ *:SolidBSD:*:*)
+ GUESS=$UNAME_MACHINE-unknown-solidbsd$UNAME_RELEASE
+ ;;
+ *:OS108:*:*)
+ GUESS=$UNAME_MACHINE-unknown-os108_$UNAME_RELEASE
+ ;;
+ macppc:MirBSD:*:*)
+ GUESS=powerpc-unknown-mirbsd$UNAME_RELEASE
+ ;;
+ *:MirBSD:*:*)
+ GUESS=$UNAME_MACHINE-unknown-mirbsd$UNAME_RELEASE
+ ;;
+ *:Sortix:*:*)
+ GUESS=$UNAME_MACHINE-unknown-sortix
+ ;;
+ *:Twizzler:*:*)
+ GUESS=$UNAME_MACHINE-unknown-twizzler
+ ;;
+ *:Redox:*:*)
+ GUESS=$UNAME_MACHINE-unknown-redox
+ ;;
+ mips:OSF1:*.*)
+ GUESS=mips-dec-osf1
+ ;;
+ alpha:OSF1:*:*)
+ # Reset EXIT trap before exiting to avoid spurious non-zero exit code.
+ trap '' 0
+ case $UNAME_RELEASE in
+ *4.0)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+ ;;
+ *5.*)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+ ;;
+ esac
+ # According to Compaq, /usr/sbin/psrinfo has been available on
+ # OSF/1 and Tru64 systems produced since 1995. I hope that
+ # covers most systems running today. This code pipes the CPU
+ # types through head -n 1, so we only detect the type of CPU 0.
+ ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+ case $ALPHA_CPU_TYPE in
+ "EV4 (21064)")
+ UNAME_MACHINE=alpha ;;
+ "EV4.5 (21064)")
+ UNAME_MACHINE=alpha ;;
+ "LCA4 (21066/21068)")
+ UNAME_MACHINE=alpha ;;
+ "EV5 (21164)")
+ UNAME_MACHINE=alphaev5 ;;
+ "EV5.6 (21164A)")
+ UNAME_MACHINE=alphaev56 ;;
+ "EV5.6 (21164PC)")
+ UNAME_MACHINE=alphapca56 ;;
+ "EV5.7 (21164PC)")
+ UNAME_MACHINE=alphapca57 ;;
+ "EV6 (21264)")
+ UNAME_MACHINE=alphaev6 ;;
+ "EV6.7 (21264A)")
+ UNAME_MACHINE=alphaev67 ;;
+ "EV6.8CB (21264C)")
+ UNAME_MACHINE=alphaev68 ;;
+ "EV6.8AL (21264B)")
+ UNAME_MACHINE=alphaev68 ;;
+ "EV6.8CX (21264D)")
+ UNAME_MACHINE=alphaev68 ;;
+ "EV6.9A (21264/EV69A)")
+ UNAME_MACHINE=alphaev69 ;;
+ "EV7 (21364)")
+ UNAME_MACHINE=alphaev7 ;;
+ "EV7.9 (21364A)")
+ UNAME_MACHINE=alphaev79 ;;
+ esac
+ # A Pn.n version is a patched version.
+ # A Vn.n version is a released version.
+ # A Tn.n version is a released field test version.
+ # A Xn.n version is an unreleased experimental baselevel.
+ # 1.2 uses "1.2" for uname -r.
+ OSF_REL=`echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
+ GUESS=$UNAME_MACHINE-dec-osf$OSF_REL
+ ;;
+ Amiga*:UNIX_System_V:4.0:*)
+ GUESS=m68k-unknown-sysv4
+ ;;
+ *:[Aa]miga[Oo][Ss]:*:*)
+ GUESS=$UNAME_MACHINE-unknown-amigaos
+ ;;
+ *:[Mm]orph[Oo][Ss]:*:*)
+ GUESS=$UNAME_MACHINE-unknown-morphos
+ ;;
+ *:OS/390:*:*)
+ GUESS=i370-ibm-openedition
+ ;;
+ *:z/VM:*:*)
+ GUESS=s390-ibm-zvmoe
+ ;;
+ *:OS400:*:*)
+ GUESS=powerpc-ibm-os400
+ ;;
+ arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+ GUESS=arm-acorn-riscix$UNAME_RELEASE
+ ;;
+ arm*:riscos:*:*|arm*:RISCOS:*:*)
+ GUESS=arm-unknown-riscos
+ ;;
+ SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+ GUESS=hppa1.1-hitachi-hiuxmpp
+ ;;
+ Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+ # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+ case `(/bin/universe) 2>/dev/null` in
+ att) GUESS=pyramid-pyramid-sysv3 ;;
+ *) GUESS=pyramid-pyramid-bsd ;;
+ esac
+ ;;
+ NILE*:*:*:dcosx)
+ GUESS=pyramid-pyramid-svr4
+ ;;
+ DRS?6000:unix:4.0:6*)
+ GUESS=sparc-icl-nx6
+ ;;
+ DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+ case `/usr/bin/uname -p` in
+ sparc) GUESS=sparc-icl-nx7 ;;
+ esac
+ ;;
+ s390x:SunOS:*:*)
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+ GUESS=$UNAME_MACHINE-ibm-solaris2$SUN_REL
+ ;;
+ sun4H:SunOS:5.*:*)
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+ GUESS=sparc-hal-solaris2$SUN_REL
+ ;;
+ sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+ GUESS=sparc-sun-solaris2$SUN_REL
+ ;;
+ i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+ GUESS=i386-pc-auroraux$UNAME_RELEASE
+ ;;
+ i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+ set_cc_for_build
+ SUN_ARCH=i386
+ # If there is a compiler, see if it is configured for 64-bit objects.
+ # Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+ # This test works for both compilers.
+ if test "$CC_FOR_BUILD" != no_compiler_found; then
+ if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+ (CCOPTS="" $CC_FOR_BUILD -m64 -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+ SUN_ARCH=x86_64
+ fi
+ fi
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+ GUESS=$SUN_ARCH-pc-solaris2$SUN_REL
+ ;;
+ sun4*:SunOS:6*:*)
+ # According to config.sub, this is the proper way to canonicalize
+ # SunOS6. Hard to guess exactly what SunOS6 will be like, but
+ # it's likely to be more like Solaris than SunOS4.
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+ GUESS=sparc-sun-solaris3$SUN_REL
+ ;;
+ sun4*:SunOS:*:*)
+ case `/usr/bin/arch -k` in
+ Series*|S4*)
+ UNAME_RELEASE=`uname -v`
+ ;;
+ esac
+ # Japanese Language versions have a version number like '4.1.3-JL'.
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/-/_/'`
+ GUESS=sparc-sun-sunos$SUN_REL
+ ;;
+ sun3*:SunOS:*:*)
+ GUESS=m68k-sun-sunos$UNAME_RELEASE
+ ;;
+ sun*:*:4.2BSD:*)
+ UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+ test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3
+ case `/bin/arch` in
+ sun3)
+ GUESS=m68k-sun-sunos$UNAME_RELEASE
+ ;;
+ sun4)
+ GUESS=sparc-sun-sunos$UNAME_RELEASE
+ ;;
+ esac
+ ;;
+ aushp:SunOS:*:*)
+ GUESS=sparc-auspex-sunos$UNAME_RELEASE
+ ;;
+ # The situation for MiNT is a little confusing. The machine name
+ # can be virtually everything (everything which is not
+ # "atarist" or "atariste" at least should have a processor
+ # > m68000). The system name ranges from "MiNT" over "FreeMiNT"
+ # to the lowercase version "mint" (or "freemint"). Finally
+ # the system name "TOS" denotes a system which is actually not
+ # MiNT. But MiNT is downward compatible to TOS, so this should
+ # be no problem.
+ atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+ GUESS=m68k-atari-mint$UNAME_RELEASE
+ ;;
+ atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+ GUESS=m68k-atari-mint$UNAME_RELEASE
+ ;;
+ *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+ GUESS=m68k-atari-mint$UNAME_RELEASE
+ ;;
+ milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+ GUESS=m68k-milan-mint$UNAME_RELEASE
+ ;;
+ hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+ GUESS=m68k-hades-mint$UNAME_RELEASE
+ ;;
+ *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+ GUESS=m68k-unknown-mint$UNAME_RELEASE
+ ;;
+ m68k:machten:*:*)
+ GUESS=m68k-apple-machten$UNAME_RELEASE
+ ;;
+ powerpc:machten:*:*)
+ GUESS=powerpc-apple-machten$UNAME_RELEASE
+ ;;
+ RISC*:Mach:*:*)
+ GUESS=mips-dec-mach_bsd4.3
+ ;;
+ RISC*:ULTRIX:*:*)
+ GUESS=mips-dec-ultrix$UNAME_RELEASE
+ ;;
+ VAX*:ULTRIX*:*:*)
+ GUESS=vax-dec-ultrix$UNAME_RELEASE
+ ;;
+ 2020:CLIX:*:* | 2430:CLIX:*:*)
+ GUESS=clipper-intergraph-clix$UNAME_RELEASE
+ ;;
+ mips:*:*:UMIPS | mips:*:*:RISCos)
+ set_cc_for_build
+ sed 's/^ //' << EOF > "$dummy.c"
+#ifdef __cplusplus
+#include <stdio.h> /* for printf() prototype */
+ int main (int argc, char *argv[]) {
+#else
+ int main (argc, argv) int argc; char *argv[]; {
+#endif
+ #if defined (host_mips) && defined (MIPSEB)
+ #if defined (SYSTYPE_SYSV)
+ printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0);
+ #endif
+ #if defined (SYSTYPE_SVR4)
+ printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0);
+ #endif
+ #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+ printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0);
+ #endif
+ #endif
+ exit (-1);
+ }
+EOF
+ $CC_FOR_BUILD -o "$dummy" "$dummy.c" &&
+ dummyarg=`echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+ SYSTEM_NAME=`"$dummy" "$dummyarg"` &&
+ { echo "$SYSTEM_NAME"; exit; }
+ GUESS=mips-mips-riscos$UNAME_RELEASE
+ ;;
+ Motorola:PowerMAX_OS:*:*)
+ GUESS=powerpc-motorola-powermax
+ ;;
+ Motorola:*:4.3:PL8-*)
+ GUESS=powerpc-harris-powermax
+ ;;
+ Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+ GUESS=powerpc-harris-powermax
+ ;;
+ Night_Hawk:Power_UNIX:*:*)
+ GUESS=powerpc-harris-powerunix
+ ;;
+ m88k:CX/UX:7*:*)
+ GUESS=m88k-harris-cxux7
+ ;;
+ m88k:*:4*:R4*)
+ GUESS=m88k-motorola-sysv4
+ ;;
+ m88k:*:3*:R3*)
+ GUESS=m88k-motorola-sysv3
+ ;;
+ AViiON:dgux:*:*)
+ # DG/UX returns AViiON for all architectures
+ UNAME_PROCESSOR=`/usr/bin/uname -p`
+ if test "$UNAME_PROCESSOR" = mc88100 || test "$UNAME_PROCESSOR" = mc88110
+ then
+ if test "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx || \
+ test "$TARGET_BINARY_INTERFACE"x = x
+ then
+ GUESS=m88k-dg-dgux$UNAME_RELEASE
+ else
+ GUESS=m88k-dg-dguxbcs$UNAME_RELEASE
+ fi
+ else
+ GUESS=i586-dg-dgux$UNAME_RELEASE
+ fi
+ ;;
+ M88*:DolphinOS:*:*) # DolphinOS (SVR3)
+ GUESS=m88k-dolphin-sysv3
+ ;;
+ M88*:*:R3*:*)
+ # Delta 88k system running SVR3
+ GUESS=m88k-motorola-sysv3
+ ;;
+ XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+ GUESS=m88k-tektronix-sysv3
+ ;;
+ Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+ GUESS=m68k-tektronix-bsd
+ ;;
+ *:IRIX*:*:*)
+ IRIX_REL=`echo "$UNAME_RELEASE" | sed -e 's/-/_/g'`
+ GUESS=mips-sgi-irix$IRIX_REL
+ ;;
+ ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+ GUESS=romp-ibm-aix # uname -m gives an 8 hex-code CPU id
+ ;; # Note that: echo "'`uname -s`'" gives 'AIX '
+ i*86:AIX:*:*)
+ GUESS=i386-ibm-aix
+ ;;
+ ia64:AIX:*:*)
+ if test -x /usr/bin/oslevel ; then
+ IBM_REV=`/usr/bin/oslevel`
+ else
+ IBM_REV=$UNAME_VERSION.$UNAME_RELEASE
+ fi
+ GUESS=$UNAME_MACHINE-ibm-aix$IBM_REV
+ ;;
+ *:AIX:2:3)
+ if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+ set_cc_for_build
+ sed 's/^ //' << EOF > "$dummy.c"
+ #include <sys/systemcfg.h>
+
+ main()
+ {
+ if (!__power_pc())
+ exit(1);
+ puts("powerpc-ibm-aix3.2.5");
+ exit(0);
+ }
+EOF
+ if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"`
+ then
+ GUESS=$SYSTEM_NAME
+ else
+ GUESS=rs6000-ibm-aix3.2.5
+ fi
+ elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+ GUESS=rs6000-ibm-aix3.2.4
+ else
+ GUESS=rs6000-ibm-aix3.2
+ fi
+ ;;
+ *:AIX:*:[4567])
+ IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+ if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then
+ IBM_ARCH=rs6000
+ else
+ IBM_ARCH=powerpc
+ fi
+ if test -x /usr/bin/lslpp ; then
+ IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | \
+ awk -F: '{ print $3 }' | sed s/[0-9]*$/0/`
+ else
+ IBM_REV=$UNAME_VERSION.$UNAME_RELEASE
+ fi
+ GUESS=$IBM_ARCH-ibm-aix$IBM_REV
+ ;;
+ *:AIX:*:*)
+ GUESS=rs6000-ibm-aix
+ ;;
+ ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*)
+ GUESS=romp-ibm-bsd4.4
+ ;;
+ ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
+ GUESS=romp-ibm-bsd$UNAME_RELEASE # 4.3 with uname added to
+ ;; # report: romp-ibm BSD 4.3
+ *:BOSX:*:*)
+ GUESS=rs6000-bull-bosx
+ ;;
+ DPX/2?00:B.O.S.:*:*)
+ GUESS=m68k-bull-sysv3
+ ;;
+ 9000/[34]??:4.3bsd:1.*:*)
+ GUESS=m68k-hp-bsd
+ ;;
+ hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+ GUESS=m68k-hp-bsd4.4
+ ;;
+ 9000/[34678]??:HP-UX:*:*)
+ HPUX_REV=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*.[0B]*//'`
+ case $UNAME_MACHINE in
+ 9000/31?) HP_ARCH=m68000 ;;
+ 9000/[34]??) HP_ARCH=m68k ;;
+ 9000/[678][0-9][0-9])
+ if test -x /usr/bin/getconf; then
+ sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+ sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+ case $sc_cpu_version in
+ 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0
+ 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1
+ 532) # CPU_PA_RISC2_0
+ case $sc_kernel_bits in
+ 32) HP_ARCH=hppa2.0n ;;
+ 64) HP_ARCH=hppa2.0w ;;
+ '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20
+ esac ;;
+ esac
+ fi
+ if test "$HP_ARCH" = ""; then
+ set_cc_for_build
+ sed 's/^ //' << EOF > "$dummy.c"
+
+ #define _HPUX_SOURCE
+ #include <stdlib.h>
+ #include <unistd.h>
+
+ int main ()
+ {
+ #if defined(_SC_KERNEL_BITS)
+ long bits = sysconf(_SC_KERNEL_BITS);
+ #endif
+ long cpu = sysconf (_SC_CPU_VERSION);
+
+ switch (cpu)
+ {
+ case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+ case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+ case CPU_PA_RISC2_0:
+ #if defined(_SC_KERNEL_BITS)
+ switch (bits)
+ {
+ case 64: puts ("hppa2.0w"); break;
+ case 32: puts ("hppa2.0n"); break;
+ default: puts ("hppa2.0"); break;
+ } break;
+ #else /* !defined(_SC_KERNEL_BITS) */
+ puts ("hppa2.0"); break;
+ #endif
+ default: puts ("hppa1.0"); break;
+ }
+ exit (0);
+ }
+EOF
+ (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=`"$dummy"`
+ test -z "$HP_ARCH" && HP_ARCH=hppa
+ fi ;;
+ esac
+ if test "$HP_ARCH" = hppa2.0w
+ then
+ set_cc_for_build
+
+ # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+ # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler
+ # generating 64-bit code. GNU and HP use different nomenclature:
+ #
+ # $ CC_FOR_BUILD=cc ./config.guess
+ # => hppa2.0w-hp-hpux11.23
+ # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+ # => hppa64-hp-hpux11.23
+
+ if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) |
+ grep -q __LP64__
+ then
+ HP_ARCH=hppa2.0w
+ else
+ HP_ARCH=hppa64
+ fi
+ fi
+ GUESS=$HP_ARCH-hp-hpux$HPUX_REV
+ ;;
+ ia64:HP-UX:*:*)
+ HPUX_REV=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*.[0B]*//'`
+ GUESS=ia64-hp-hpux$HPUX_REV
+ ;;
+ 3050*:HI-UX:*:*)
+ set_cc_for_build
+ sed 's/^ //' << EOF > "$dummy.c"
+ #include <unistd.h>
+ int
+ main ()
+ {
+ long cpu = sysconf (_SC_CPU_VERSION);
+ /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+ true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct
+ results, however. */
+ if (CPU_IS_PA_RISC (cpu))
+ {
+ switch (cpu)
+ {
+ case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+ case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+ case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+ default: puts ("hppa-hitachi-hiuxwe2"); break;
+ }
+ }
+ else if (CPU_IS_HP_MC68K (cpu))
+ puts ("m68k-hitachi-hiuxwe2");
+ else puts ("unknown-hitachi-hiuxwe2");
+ exit (0);
+ }
+EOF
+ $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` &&
+ { echo "$SYSTEM_NAME"; exit; }
+ GUESS=unknown-hitachi-hiuxwe2
+ ;;
+ 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*)
+ GUESS=hppa1.1-hp-bsd
+ ;;
+ 9000/8??:4.3bsd:*:*)
+ GUESS=hppa1.0-hp-bsd
+ ;;
+ *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+ GUESS=hppa1.0-hp-mpeix
+ ;;
+ hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*)
+ GUESS=hppa1.1-hp-osf
+ ;;
+ hp8??:OSF1:*:*)
+ GUESS=hppa1.0-hp-osf
+ ;;
+ i*86:OSF1:*:*)
+ if test -x /usr/sbin/sysversion ; then
+ GUESS=$UNAME_MACHINE-unknown-osf1mk
+ else
+ GUESS=$UNAME_MACHINE-unknown-osf1
+ fi
+ ;;
+ parisc*:Lites*:*:*)
+ GUESS=hppa1.1-hp-lites
+ ;;
+ C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+ GUESS=c1-convex-bsd
+ ;;
+ C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+ if getsysinfo -f scalar_acc
+ then echo c32-convex-bsd
+ else echo c2-convex-bsd
+ fi
+ exit ;;
+ C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+ GUESS=c34-convex-bsd
+ ;;
+ C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+ GUESS=c38-convex-bsd
+ ;;
+ C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+ GUESS=c4-convex-bsd
+ ;;
+ CRAY*Y-MP:*:*:*)
+ CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+ GUESS=ymp-cray-unicos$CRAY_REL
+ ;;
+ CRAY*[A-Z]90:*:*:*)
+ echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \
+ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+ -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*TS:*:*:*)
+ CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+ GUESS=t90-cray-unicos$CRAY_REL
+ ;;
+ CRAY*T3E:*:*:*)
+ CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+ GUESS=alphaev5-cray-unicosmk$CRAY_REL
+ ;;
+ CRAY*SV1:*:*:*)
+ CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+ GUESS=sv1-cray-unicos$CRAY_REL
+ ;;
+ *:UNICOS/mp:*:*)
+ CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+ GUESS=craynv-cray-unicosmp$CRAY_REL
+ ;;
+ F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+ FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
+ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
+ FUJITSU_REL=`echo "$UNAME_RELEASE" | sed -e 's/ /_/'`
+ GUESS=${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}
+ ;;
+ 5000:UNIX_System_V:4.*:*)
+ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
+ FUJITSU_REL=`echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'`
+ GUESS=sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}
+ ;;
+ i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+ GUESS=$UNAME_MACHINE-pc-bsdi$UNAME_RELEASE
+ ;;
+ sparc*:BSD/OS:*:*)
+ GUESS=sparc-unknown-bsdi$UNAME_RELEASE
+ ;;
+ *:BSD/OS:*:*)
+ GUESS=$UNAME_MACHINE-unknown-bsdi$UNAME_RELEASE
+ ;;
+ arm:FreeBSD:*:*)
+ UNAME_PROCESSOR=`uname -p`
+ set_cc_for_build
+ if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_PCS_VFP
+ then
+ FREEBSD_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+ GUESS=$UNAME_PROCESSOR-unknown-freebsd$FREEBSD_REL-gnueabi
+ else
+ FREEBSD_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+ GUESS=$UNAME_PROCESSOR-unknown-freebsd$FREEBSD_REL-gnueabihf
+ fi
+ ;;
+ *:FreeBSD:*:*)
+ UNAME_PROCESSOR=`uname -p`
+ case $UNAME_PROCESSOR in
+ amd64)
+ UNAME_PROCESSOR=x86_64 ;;
+ i386)
+ UNAME_PROCESSOR=i586 ;;
+ esac
+ FREEBSD_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+ GUESS=$UNAME_PROCESSOR-unknown-freebsd$FREEBSD_REL
+ ;;
+ i*:CYGWIN*:*)
+ GUESS=$UNAME_MACHINE-pc-cygwin
+ ;;
+ *:MINGW64*:*)
+ GUESS=$UNAME_MACHINE-pc-mingw64
+ ;;
+ *:MINGW*:*)
+ GUESS=$UNAME_MACHINE-pc-mingw32
+ ;;
+ *:MSYS*:*)
+ GUESS=$UNAME_MACHINE-pc-msys
+ ;;
+ i*:PW*:*)
+ GUESS=$UNAME_MACHINE-pc-pw32
+ ;;
+ *:SerenityOS:*:*)
+ GUESS=$UNAME_MACHINE-pc-serenity
+ ;;
+ *:Interix*:*)
+ case $UNAME_MACHINE in
+ x86)
+ GUESS=i586-pc-interix$UNAME_RELEASE
+ ;;
+ authenticamd | genuineintel | EM64T)
+ GUESS=x86_64-unknown-interix$UNAME_RELEASE
+ ;;
+ IA64)
+ GUESS=ia64-unknown-interix$UNAME_RELEASE
+ ;;
+ esac ;;
+ i*:UWIN*:*)
+ GUESS=$UNAME_MACHINE-pc-uwin
+ ;;
+ amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+ GUESS=x86_64-pc-cygwin
+ ;;
+ prep*:SunOS:5.*:*)
+ SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+ GUESS=powerpcle-unknown-solaris2$SUN_REL
+ ;;
+ *:GNU:*:*)
+ # the GNU system
+ GNU_ARCH=`echo "$UNAME_MACHINE" | sed -e 's,[-/].*$,,'`
+ GNU_REL=`echo "$UNAME_RELEASE" | sed -e 's,/.*$,,'`
+ GUESS=$GNU_ARCH-unknown-$LIBC$GNU_REL
+ ;;
+ *:GNU/*:*:*)
+ # other systems with GNU libc and userland
+ GNU_SYS=`echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"`
+ GNU_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+ GUESS=$UNAME_MACHINE-unknown-$GNU_SYS$GNU_REL-$LIBC
+ ;;
+ x86_64:[Mm]anagarm:*:*|i?86:[Mm]anagarm:*:*)
+ GUESS="$UNAME_MACHINE-pc-managarm-mlibc"
+ ;;
+ *:[Mm]anagarm:*:*)
+ GUESS="$UNAME_MACHINE-unknown-managarm-mlibc"
+ ;;
+ *:Minix:*:*)
+ GUESS=$UNAME_MACHINE-unknown-minix
+ ;;
+ aarch64:Linux:*:*)
+ set_cc_for_build
+ CPU=$UNAME_MACHINE
+ LIBCABI=$LIBC
+ if test "$CC_FOR_BUILD" != no_compiler_found; then
+ ABI=64
+ sed 's/^ //' << EOF > "$dummy.c"
+ #ifdef __ARM_EABI__
+ #ifdef __ARM_PCS_VFP
+ ABI=eabihf
+ #else
+ ABI=eabi
+ #endif
+ #endif
+EOF
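+ # Preprocessing discards the #ifdef branches that do not apply, so at most
+ # one "ABI=..." line survives; grep pulls it out below and eval applies it
+ # to the shell, leaving ABI=64 untouched when __ARM_EABI__ is not defined.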
+ cc_set_abi=`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^ABI' | sed 's, ,,g'`
+ eval "$cc_set_abi"
+ case $ABI in
+ eabi | eabihf) CPU=armv8l; LIBCABI=$LIBC$ABI ;;
+ esac
+ fi
+ GUESS=$CPU-unknown-linux-$LIBCABI
+ ;;
+ aarch64_be:Linux:*:*)
+ UNAME_MACHINE=aarch64_be
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ alpha:Linux:*:*)
+ case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' /proc/cpuinfo 2>/dev/null` in
+ EV5) UNAME_MACHINE=alphaev5 ;;
+ EV56) UNAME_MACHINE=alphaev56 ;;
+ PCA56) UNAME_MACHINE=alphapca56 ;;
+ PCA57) UNAME_MACHINE=alphapca56 ;;
+ EV6) UNAME_MACHINE=alphaev6 ;;
+ EV67) UNAME_MACHINE=alphaev67 ;;
+ EV68*) UNAME_MACHINE=alphaev68 ;;
+ esac
+ objdump --private-headers /bin/sh | grep -q ld.so.1
+ if test "$?" = 0 ; then LIBC=gnulibc1 ; fi
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ arc:Linux:*:* | arceb:Linux:*:* | arc32:Linux:*:* | arc64:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ arm*:Linux:*:*)
+ set_cc_for_build
+ if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_EABI__
+ then
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ else
+ if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_PCS_VFP
+ then
+ GUESS=$UNAME_MACHINE-unknown-linux-${LIBC}eabi
+ else
+ GUESS=$UNAME_MACHINE-unknown-linux-${LIBC}eabihf
+ fi
+ fi
+ ;;
+ avr32*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ cris:Linux:*:*)
+ GUESS=$UNAME_MACHINE-axis-linux-$LIBC
+ ;;
+ crisv32:Linux:*:*)
+ GUESS=$UNAME_MACHINE-axis-linux-$LIBC
+ ;;
+ e2k:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ frv:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ hexagon:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ i*86:Linux:*:*)
+ GUESS=$UNAME_MACHINE-pc-linux-$LIBC
+ ;;
+ ia64:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ k1om:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ kvx:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ kvx:cos:*:*)
+ GUESS=$UNAME_MACHINE-unknown-cos
+ ;;
+ kvx:mbr:*:*)
+ GUESS=$UNAME_MACHINE-unknown-mbr
+ ;;
+ loongarch32:Linux:*:* | loongarch64:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ m32r*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ m68*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ mips:Linux:*:* | mips64:Linux:*:*)
+ set_cc_for_build
+ IS_GLIBC=0
+ test x"${LIBC}" = xgnu && IS_GLIBC=1
+ sed 's/^ //' << EOF > "$dummy.c"
+ #undef CPU
+ #undef mips
+ #undef mipsel
+ #undef mips64
+ #undef mips64el
+ #if ${IS_GLIBC} && defined(_ABI64)
+ LIBCABI=gnuabi64
+ #else
+ #if ${IS_GLIBC} && defined(_ABIN32)
+ LIBCABI=gnuabin32
+ #else
+ LIBCABI=${LIBC}
+ #endif
+ #endif
+
+ #if ${IS_GLIBC} && defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6
+ CPU=mipsisa64r6
+ #else
+ #if ${IS_GLIBC} && !defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6
+ CPU=mipsisa32r6
+ #else
+ #if defined(__mips64)
+ CPU=mips64
+ #else
+ CPU=mips
+ #endif
+ #endif
+ #endif
+
+ #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+ MIPS_ENDIAN=el
+ #else
+ #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+ MIPS_ENDIAN=
+ #else
+ MIPS_ENDIAN=
+ #endif
+ #endif
+EOF
+ cc_set_vars=`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU\|^MIPS_ENDIAN\|^LIBCABI'`
+ eval "$cc_set_vars"
+ test "x$CPU" != x && { echo "$CPU${MIPS_ENDIAN}-unknown-linux-$LIBCABI"; exit; }
+ ;;
+ mips64el:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ openrisc*:Linux:*:*)
+ GUESS=or1k-unknown-linux-$LIBC
+ ;;
+ or32:Linux:*:* | or1k*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ padre:Linux:*:*)
+ GUESS=sparc-unknown-linux-$LIBC
+ ;;
+ parisc64:Linux:*:* | hppa64:Linux:*:*)
+ GUESS=hppa64-unknown-linux-$LIBC
+ ;;
+ parisc:Linux:*:* | hppa:Linux:*:*)
+ # Look for CPU level
+ case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+ PA7*) GUESS=hppa1.1-unknown-linux-$LIBC ;;
+ PA8*) GUESS=hppa2.0-unknown-linux-$LIBC ;;
+ *) GUESS=hppa-unknown-linux-$LIBC ;;
+ esac
+ ;;
+ ppc64:Linux:*:*)
+ GUESS=powerpc64-unknown-linux-$LIBC
+ ;;
+ ppc:Linux:*:*)
+ GUESS=powerpc-unknown-linux-$LIBC
+ ;;
+ ppc64le:Linux:*:*)
+ GUESS=powerpc64le-unknown-linux-$LIBC
+ ;;
+ ppcle:Linux:*:*)
+ GUESS=powerpcle-unknown-linux-$LIBC
+ ;;
+ riscv32:Linux:*:* | riscv32be:Linux:*:* | riscv64:Linux:*:* | riscv64be:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ s390:Linux:*:* | s390x:Linux:*:*)
+ GUESS=$UNAME_MACHINE-ibm-linux-$LIBC
+ ;;
+ sh64*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ sh*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ sparc:Linux:*:* | sparc64:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ tile*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ vax:Linux:*:*)
+ GUESS=$UNAME_MACHINE-dec-linux-$LIBC
+ ;;
+ x86_64:Linux:*:*)
+ set_cc_for_build
+ CPU=$UNAME_MACHINE
+ LIBCABI=$LIBC
+ if test "$CC_FOR_BUILD" != no_compiler_found; then
+ ABI=64
+ sed 's/^ //' << EOF > "$dummy.c"
+ #ifdef __i386__
+ ABI=x86
+ #else
+ #ifdef __ILP32__
+ ABI=x32
+ #endif
+ #endif
+EOF
+ cc_set_abi=`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^ABI' | sed 's, ,,g'`
+ eval "$cc_set_abi"
+ case $ABI in
+ x86) CPU=i686 ;;
+ x32) LIBCABI=${LIBC}x32 ;;
+ esac
+ fi
+ GUESS=$CPU-pc-linux-$LIBCABI
+ ;;
+ xtensa*:Linux:*:*)
+ GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+ ;;
+ i*86:DYNIX/ptx:4*:*)
+ # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+ # Earlier versions are messed up and put the nodename in both
+ # sysname and nodename.
+ GUESS=i386-sequent-sysv4
+ ;;
+ i*86:UNIX_SV:4.2MP:2.*)
+ # Unixware is an offshoot of SVR4, but it has its own version
+ # number series starting with 2...
+ # I am not positive that other SVR4 systems won't match this,
+ # I just have to hope. -- rms.
+ # Use sysv4.2uw... so that sysv4* matches it.
+ GUESS=$UNAME_MACHINE-pc-sysv4.2uw$UNAME_VERSION
+ ;;
+ i*86:OS/2:*:*)
+ # If we were able to find 'uname', then EMX Unix compatibility
+ # is probably installed.
+ GUESS=$UNAME_MACHINE-pc-os2-emx
+ ;;
+ i*86:XTS-300:*:STOP)
+ GUESS=$UNAME_MACHINE-unknown-stop
+ ;;
+ i*86:atheos:*:*)
+ GUESS=$UNAME_MACHINE-unknown-atheos
+ ;;
+ i*86:syllable:*:*)
+ GUESS=$UNAME_MACHINE-pc-syllable
+ ;;
+ i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+ GUESS=i386-unknown-lynxos$UNAME_RELEASE
+ ;;
+ i*86:*DOS:*:*)
+ GUESS=$UNAME_MACHINE-pc-msdosdjgpp
+ ;;
+ i*86:*:4.*:*)
+ UNAME_REL=`echo "$UNAME_RELEASE" | sed 's/\/MP$//'`
+ if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+ GUESS=$UNAME_MACHINE-univel-sysv$UNAME_REL
+ else
+ GUESS=$UNAME_MACHINE-pc-sysv$UNAME_REL
+ fi
+ ;;
+ i*86:*:5:[678]*)
+ # UnixWare 7.x, OpenUNIX and OpenServer 6.
+ case `/bin/uname -X | grep "^Machine"` in
+ *486*) UNAME_MACHINE=i486 ;;
+ *Pentium) UNAME_MACHINE=i586 ;;
+ *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+ esac
+ GUESS=$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+ ;;
+ i*86:*:3.2:*)
+ if test -f /usr/options/cb.name; then
+ UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+ GUESS=$UNAME_MACHINE-pc-isc$UNAME_REL
+ elif /bin/uname -X 2>/dev/null >/dev/null ; then
+ UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+ (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+ (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+ && UNAME_MACHINE=i586
+ (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+ && UNAME_MACHINE=i686
+ (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+ && UNAME_MACHINE=i686
+ GUESS=$UNAME_MACHINE-pc-sco$UNAME_REL
+ else
+ GUESS=$UNAME_MACHINE-pc-sysv32
+ fi
+ ;;
+ pc:*:*:*)
+ # Left here for compatibility:
+ # uname -m always prints 'pc' for DJGPP, but says nothing about
+ # the processor, so we play it safe and assume i586.
+ # Note: whatever this is, it MUST be the same as what config.sub
+ # prints for the "djgpp" host, or else GDB configure will decide that
+ # this is a cross-build.
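+ # For example, './config.sub djgpp' in this tree should likewise
+ # canonicalize to i586-pc-msdosdjgpp.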
+ GUESS=i586-pc-msdosdjgpp
+ ;;
+ Intel:Mach:3*:*)
+ GUESS=i386-pc-mach3
+ ;;
+ paragon:*:*:*)
+ GUESS=i860-intel-osf1
+ ;;
+ i860:*:4.*:*) # i860-SVR4
+ if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+ GUESS=i860-stardent-sysv$UNAME_RELEASE # Stardent Vistra i860-SVR4
+ else # Add other i860-SVR4 vendors below as they are discovered.
+ GUESS=i860-unknown-sysv$UNAME_RELEASE # Unknown i860-SVR4
+ fi
+ ;;
+ mini*:CTIX:SYS*5:*)
+ # "miniframe"
+ GUESS=m68010-convergent-sysv
+ ;;
+ mc68k:UNIX:SYSTEM5:3.51m)
+ GUESS=m68k-convergent-sysv
+ ;;
+ M680?0:D-NIX:5.3:*)
+ GUESS=m68k-diab-dnix
+ ;;
+ M68*:*:R3V[5678]*:*)
+ test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+ 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+ OS_REL=''
+ test -r /etc/.relid \
+ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
+ 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4; exit; } ;;
+ NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+ OS_REL='.3'
+ test -r /etc/.relid \
+ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3"$OS_REL"; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+ && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
+ m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+ GUESS=m68k-unknown-lynxos$UNAME_RELEASE
+ ;;
+ mc68030:UNIX_System_V:4.*:*)
+ GUESS=m68k-atari-sysv4
+ ;;
+ TSUNAMI:LynxOS:2.*:*)
+ GUESS=sparc-unknown-lynxos$UNAME_RELEASE
+ ;;
+ rs6000:LynxOS:2.*:*)
+ GUESS=rs6000-unknown-lynxos$UNAME_RELEASE
+ ;;
+ PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+ GUESS=powerpc-unknown-lynxos$UNAME_RELEASE
+ ;;
+ SM[BE]S:UNIX_SV:*:*)
+ GUESS=mips-dde-sysv$UNAME_RELEASE
+ ;;
+ RM*:ReliantUNIX-*:*:*)
+ GUESS=mips-sni-sysv4
+ ;;
+ RM*:SINIX-*:*:*)
+ GUESS=mips-sni-sysv4
+ ;;
+ *:SINIX-*:*:*)
+ if uname -p 2>/dev/null >/dev/null ; then
+ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ GUESS=$UNAME_MACHINE-sni-sysv4
+ else
+ GUESS=ns32k-sni-sysv
+ fi
+ ;;
+ PENTIUM:*:4.0*:*) # Unisys 'ClearPath HMP IX 4000' SVR4/MP effort
+ # says <Richard.M.Bartel@ccMail.Census.GOV>
+ GUESS=i586-unisys-sysv4
+ ;;
+ *:UNIX_System_V:4*:FTX*)
+ # From Gerald Hewes <hewes@openmarket.com>.
+ # How about differentiating between stratus architectures? -djm
+ GUESS=hppa1.1-stratus-sysv4
+ ;;
+ *:*:*:FTX*)
+ # From seanf@swdc.stratus.com.
+ GUESS=i860-stratus-sysv4
+ ;;
+ i*86:VOS:*:*)
+ # From Paul.Green@stratus.com.
+ GUESS=$UNAME_MACHINE-stratus-vos
+ ;;
+ *:VOS:*:*)
+ # From Paul.Green@stratus.com.
+ GUESS=hppa1.1-stratus-vos
+ ;;
+ mc68*:A/UX:*:*)
+ GUESS=m68k-apple-aux$UNAME_RELEASE
+ ;;
+ news*:NEWS-OS:6*:*)
+ GUESS=mips-sony-newsos6
+ ;;
+ R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+ if test -d /usr/nec; then
+ GUESS=mips-nec-sysv$UNAME_RELEASE
+ else
+ GUESS=mips-unknown-sysv$UNAME_RELEASE
+ fi
+ ;;
+ BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only.
+ GUESS=powerpc-be-beos
+ ;;
+ BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only.
+ GUESS=powerpc-apple-beos
+ ;;
+ BePC:BeOS:*:*) # BeOS running on Intel PC compatible.
+ GUESS=i586-pc-beos
+ ;;
+ BePC:Haiku:*:*) # Haiku running on Intel PC compatible.
+ GUESS=i586-pc-haiku
+ ;;
+ ppc:Haiku:*:*) # Haiku running on Apple PowerPC
+ GUESS=powerpc-apple-haiku
+ ;;
+ *:Haiku:*:*) # Haiku modern gcc (not bound by BeOS compat)
+ GUESS=$UNAME_MACHINE-unknown-haiku
+ ;;
+ SX-4:SUPER-UX:*:*)
+ GUESS=sx4-nec-superux$UNAME_RELEASE
+ ;;
+ SX-5:SUPER-UX:*:*)
+ GUESS=sx5-nec-superux$UNAME_RELEASE
+ ;;
+ SX-6:SUPER-UX:*:*)
+ GUESS=sx6-nec-superux$UNAME_RELEASE
+ ;;
+ SX-7:SUPER-UX:*:*)
+ GUESS=sx7-nec-superux$UNAME_RELEASE
+ ;;
+ SX-8:SUPER-UX:*:*)
+ GUESS=sx8-nec-superux$UNAME_RELEASE
+ ;;
+ SX-8R:SUPER-UX:*:*)
+ GUESS=sx8r-nec-superux$UNAME_RELEASE
+ ;;
+ SX-ACE:SUPER-UX:*:*)
+ GUESS=sxace-nec-superux$UNAME_RELEASE
+ ;;
+ Power*:Rhapsody:*:*)
+ GUESS=powerpc-apple-rhapsody$UNAME_RELEASE
+ ;;
+ *:Rhapsody:*:*)
+ GUESS=$UNAME_MACHINE-apple-rhapsody$UNAME_RELEASE
+ ;;
+ arm64:Darwin:*:*)
+ GUESS=aarch64-apple-darwin$UNAME_RELEASE
+ ;;
+ *:Darwin:*:*)
+ UNAME_PROCESSOR=`uname -p`
+ case $UNAME_PROCESSOR in
+ unknown) UNAME_PROCESSOR=powerpc ;;
+ esac
+ if command -v xcode-select > /dev/null 2> /dev/null && \
+ ! xcode-select --print-path > /dev/null 2> /dev/null ; then
+ # Avoid executing cc if there is no toolchain installed as
+ # cc will be a stub that puts up a graphical alert
+ # prompting the user to install developer tools.
+ CC_FOR_BUILD=no_compiler_found
+ else
+ set_cc_for_build
+ fi
+ if test "$CC_FOR_BUILD" != no_compiler_found; then
+ if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+ case $UNAME_PROCESSOR in
+ i386) UNAME_PROCESSOR=x86_64 ;;
+ powerpc) UNAME_PROCESSOR=powerpc64 ;;
+ esac
+ fi
+ # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc
+ if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \
+ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_PPC >/dev/null
+ then
+ UNAME_PROCESSOR=powerpc
+ fi
+ elif test "$UNAME_PROCESSOR" = i386 ; then
+ # uname -m returns i386 or x86_64
+ UNAME_PROCESSOR=$UNAME_MACHINE
+ fi
+ GUESS=$UNAME_PROCESSOR-apple-darwin$UNAME_RELEASE
+ ;;
+ *:procnto*:*:* | *:QNX:[0123456789]*:*)
+ UNAME_PROCESSOR=`uname -p`
+ if test "$UNAME_PROCESSOR" = x86; then
+ UNAME_PROCESSOR=i386
+ UNAME_MACHINE=pc
+ fi
+ GUESS=$UNAME_PROCESSOR-$UNAME_MACHINE-nto-qnx$UNAME_RELEASE
+ ;;
+ *:QNX:*:4*)
+ GUESS=i386-pc-qnx
+ ;;
+ NEO-*:NONSTOP_KERNEL:*:*)
+ GUESS=neo-tandem-nsk$UNAME_RELEASE
+ ;;
+ NSE-*:NONSTOP_KERNEL:*:*)
+ GUESS=nse-tandem-nsk$UNAME_RELEASE
+ ;;
+ NSR-*:NONSTOP_KERNEL:*:*)
+ GUESS=nsr-tandem-nsk$UNAME_RELEASE
+ ;;
+ NSV-*:NONSTOP_KERNEL:*:*)
+ GUESS=nsv-tandem-nsk$UNAME_RELEASE
+ ;;
+ NSX-*:NONSTOP_KERNEL:*:*)
+ GUESS=nsx-tandem-nsk$UNAME_RELEASE
+ ;;
+ *:NonStop-UX:*:*)
+ GUESS=mips-compaq-nonstopux
+ ;;
+ BS2000:POSIX*:*:*)
+ GUESS=bs2000-siemens-sysv
+ ;;
+ DS/*:UNIX_System_V:*:*)
+ GUESS=$UNAME_MACHINE-$UNAME_SYSTEM-$UNAME_RELEASE
+ ;;
+ *:Plan9:*:*)
+ # "uname -m" is not consistent, so use $cputype instead. 386
+ # is converted to i386 for consistency with other x86
+ # operating systems.
+ if test "${cputype-}" = 386; then
+ UNAME_MACHINE=i386
+ elif test "x${cputype-}" != x; then
+ UNAME_MACHINE=$cputype
+ fi
+ GUESS=$UNAME_MACHINE-unknown-plan9
+ ;;
+ *:TOPS-10:*:*)
+ GUESS=pdp10-unknown-tops10
+ ;;
+ *:TENEX:*:*)
+ GUESS=pdp10-unknown-tenex
+ ;;
+ KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+ GUESS=pdp10-dec-tops20
+ ;;
+ XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+ GUESS=pdp10-xkl-tops20
+ ;;
+ *:TOPS-20:*:*)
+ GUESS=pdp10-unknown-tops20
+ ;;
+ *:ITS:*:*)
+ GUESS=pdp10-unknown-its
+ ;;
+ SEI:*:*:SEIUX)
+ GUESS=mips-sei-seiux$UNAME_RELEASE
+ ;;
+ *:DragonFly:*:*)
+ DRAGONFLY_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+ GUESS=$UNAME_MACHINE-unknown-dragonfly$DRAGONFLY_REL
+ ;;
+ *:*VMS:*:*)
+ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ case $UNAME_MACHINE in
+ A*) GUESS=alpha-dec-vms ;;
+ I*) GUESS=ia64-dec-vms ;;
+ V*) GUESS=vax-dec-vms ;;
+ esac ;;
+ *:XENIX:*:SysV)
+ GUESS=i386-pc-xenix
+ ;;
+ i*86:skyos:*:*)
+ SKYOS_REL=`echo "$UNAME_RELEASE" | sed -e 's/ .*$//'`
+ GUESS=$UNAME_MACHINE-pc-skyos$SKYOS_REL
+ ;;
+ i*86:rdos:*:*)
+ GUESS=$UNAME_MACHINE-pc-rdos
+ ;;
+ i*86:Fiwix:*:*)
+ GUESS=$UNAME_MACHINE-pc-fiwix
+ ;;
+ *:AROS:*:*)
+ GUESS=$UNAME_MACHINE-unknown-aros
+ ;;
+ x86_64:VMkernel:*:*)
+ GUESS=$UNAME_MACHINE-unknown-esx
+ ;;
+ amd64:Isilon\ OneFS:*:*)
+ GUESS=x86_64-unknown-onefs
+ ;;
+ *:Unleashed:*:*)
+ GUESS=$UNAME_MACHINE-unknown-unleashed$UNAME_RELEASE
+ ;;
+esac
+
+# Do we have a guess based on uname results?
+if test "x$GUESS" != x; then
+ echo "$GUESS"
+ exit
+fi
+
+# No uname command or uname output not recognized.
+set_cc_for_build
+cat > "$dummy.c" <<EOF
+#ifdef _SEQUENT_
+#include <sys/types.h>
+#include <sys/utsname.h>
+#endif
+#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__)
+#if defined (vax) || defined (__vax) || defined (__vax__) || defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__)
+#include <signal.h>
+#if defined(_SIZE_T_) || defined(SIGLOST)
+#include <sys/utsname.h>
+#endif
+#endif
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+ /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed,
+ I don't know.... */
+ printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+ printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+ "4"
+#else
+ ""
+#endif
+ ); exit (0);
+#endif
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+ int version;
+ version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+ if (version < 4)
+ printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+ else
+ printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+ exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+ printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+ printf ("ns32k-encore-mach\n"); exit (0);
+#else
+ printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+ printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+ printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+ printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+ struct utsname un;
+
+ uname(&un);
+ if (strncmp(un.version, "V2", 2) == 0) {
+ printf ("i386-sequent-ptx2\n"); exit (0);
+ }
+ if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+ printf ("i386-sequent-ptx1\n"); exit (0);
+ }
+ printf ("i386-sequent-ptx\n"); exit (0);
+#endif
+
+#if defined (vax)
+#if !defined (ultrix)
+#include <sys/param.h>
+#if defined (BSD)
+#if BSD == 43
+ printf ("vax-dec-bsd4.3\n"); exit (0);
+#else
+#if BSD == 199006
+ printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#else
+ printf ("vax-dec-bsd\n"); exit (0);
+#endif
+#endif
+#else
+ printf ("vax-dec-bsd\n"); exit (0);
+#endif
+#else
+#if defined(_SIZE_T_) || defined(SIGLOST)
+ struct utsname un;
+ uname (&un);
+ printf ("vax-dec-ultrix%s\n", un.release); exit (0);
+#else
+ printf ("vax-dec-ultrix\n"); exit (0);
+#endif
+#endif
+#endif
+#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__)
+#if defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__)
+#if defined(_SIZE_T_) || defined(SIGLOST)
+ struct utsname un;
+ uname (&un);
+ printf ("mips-dec-ultrix%s\n", un.release); exit (0);
+#else
+ printf ("mips-dec-ultrix\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (alliant) && defined (i860)
+ printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+ exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null && SYSTEM_NAME=`"$dummy"` &&
+ { echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+test -d /usr/apollo && { echo "$ISP-apollo-$SYSTYPE"; exit; }
+
+echo "$0: unable to guess system type" >&2
+
+case $UNAME_MACHINE:$UNAME_SYSTEM in
+ mips:Linux | mips64:Linux)
+ # If we got here on MIPS GNU/Linux, output extra information.
+ cat >&2 <<EOF
+
+NOTE: MIPS GNU/Linux systems require a C compiler to fully recognize
+the system type. Please install a C compiler and try again.
+EOF
+ ;;
+esac
+
+cat >&2 <<EOF
+
+This script (version $timestamp) has failed to recognize the
+operating system you are using. If your script is old, overwrite *all*
+copies of config.guess and config.sub with the latest versions from:
+
+ https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
+and
+ https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
+EOF
+
+our_year=`echo $timestamp | sed 's,-.*,,'`
+thisyear=`date +%Y`
+# shellcheck disable=SC2003
+script_age=`expr "$thisyear" - "$our_year"`
+if test "$script_age" -lt 3 ; then
+ cat >&2 <<EOF
+
+If $0 has already been updated, send the following data and any
+information you think might be pertinent to config-patches@gnu.org to
+provide the necessary information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo = `(hostinfo) 2>/dev/null`
+/bin/universe = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = "$UNAME_MACHINE"
+UNAME_RELEASE = "$UNAME_RELEASE"
+UNAME_SYSTEM = "$UNAME_SYSTEM"
+UNAME_VERSION = "$UNAME_VERSION"
+EOF
+fi
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/build-aux/config.sub b/build-aux/config.sub
new file mode 100755
index 0000000..defe52c
--- /dev/null
+++ b/build-aux/config.sub
@@ -0,0 +1,1960 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+# Copyright 1992-2023 Free Software Foundation, Inc.
+
+# shellcheck disable=SC2006,SC2268 # see below for rationale
+
+timestamp='2023-09-19'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program. This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+
+
+# Please send patches to <config-patches@gnu.org>.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support. The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
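+# For illustration (a hypothetical invocation; the exact result is determined
+# by the tables below), a shorthand alias expands to the full form:
+#   $ sh config.sub amd64-linux
+#   x86_64-pc-linux-gnu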
+
+# The "shellcheck disable" line above the timestamp inhibits complaints
+# about features and limitations of the classic Bourne shell that were
+# superseded or lifted in POSIX. However, this script identifies a wide
+# variety of pre-POSIX systems that do not have POSIX shells at all, and
+# even some reasonably current systems (Solaris 10 as case-in-point) still
+# have a pre-POSIX /bin/sh.
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS
+
+Canonicalize a configuration name.
+
+Options:
+ -h, --help print this help, then exit
+ -t, --time-stamp print date of last modification, then exit
+ -v, --version print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright 1992-2023 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try '$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case $1 in
+ --time-stamp | --time* | -t )
+ echo "$timestamp" ; exit ;;
+ --version | -v )
+ echo "$version" ; exit ;;
+ --help | --h* | -h )
+ echo "$usage"; exit ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ echo "$me: invalid option $1$help" >&2
+ exit 1 ;;
+
+ *local*)
+ # First pass through any local machine types.
+ echo "$1"
+ exit ;;
+
+ * )
+ break ;;
+ esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+ exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+ exit 1;;
+esac
+
+# Split fields of configuration type
+# shellcheck disable=SC2162
+saved_IFS=$IFS
+IFS="-" read field1 field2 field3 field4 <<EOF
+$1
+EOF
+IFS=$saved_IFS
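+# For example, a four-part name such as 'arm-unknown-linux-gnueabihf' splits
+# into field1=arm, field2=unknown, field3=linux and field4=gnueabihf.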
+
+# Separate into logical components for further validation
+case $1 in
+ *-*-*-*-*)
+ echo "Invalid configuration '$1': more than four components" >&2
+ exit 1
+ ;;
+ *-*-*-*)
+ basic_machine=$field1-$field2
+ basic_os=$field3-$field4
+ ;;
+ *-*-*)
+ # Ambiguous whether COMPANY is present, or skipped and KERNEL-OS is two
+ # parts
+ maybe_os=$field2-$field3
+ case $maybe_os in
+ nto-qnx* | linux-* | uclinux-uclibc* \
+ | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \
+ | netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \
+ | storm-chaos* | os2-emx* | rtmk-nova* | managarm-* \
+ | windows-* )
+ basic_machine=$field1
+ basic_os=$maybe_os
+ ;;
+ android-linux)
+ basic_machine=$field1-unknown
+ basic_os=linux-android
+ ;;
+ *)
+ basic_machine=$field1-$field2
+ basic_os=$field3
+ ;;
+ esac
+ ;;
+ *-*)
+ # A lone config we happen to match not fitting any pattern
+ case $field1-$field2 in
+ decstation-3100)
+ basic_machine=mips-dec
+ basic_os=
+ ;;
+ *-*)
+ # Second component is usually, but not always, the OS
+ case $field2 in
+ # Prevent following clause from handling this valid os
+ sun*os*)
+ basic_machine=$field1
+ basic_os=$field2
+ ;;
+ zephyr*)
+ basic_machine=$field1-unknown
+ basic_os=$field2
+ ;;
+ # Manufacturers
+ dec* | mips* | sequent* | encore* | pc532* | sgi* | sony* \
+ | att* | 7300* | 3300* | delta* | motorola* | sun[234]* \
+ | unicom* | ibm* | next | hp | isi* | apollo | altos* \
+ | convergent* | ncr* | news | 32* | 3600* | 3100* \
+ | hitachi* | c[123]* | convex* | sun | crds | omron* | dg \
+ | ultra | tti* | harris | dolphin | highlevel | gould \
+ | cbm | ns | masscomp | apple | axis | knuth | cray \
+ | microblaze* | sim | cisco \
+ | oki | wec | wrs | winbond)
+ basic_machine=$field1-$field2
+ basic_os=
+ ;;
+ *)
+ basic_machine=$field1
+ basic_os=$field2
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+ *)
+ # Convert single-component short-hands not valid as part of
+ # multi-component configurations.
+ case $field1 in
+ 386bsd)
+ basic_machine=i386-pc
+ basic_os=bsd
+ ;;
+ a29khif)
+ basic_machine=a29k-amd
+ basic_os=udi
+ ;;
+ adobe68k)
+ basic_machine=m68010-adobe
+ basic_os=scout
+ ;;
+ alliant)
+ basic_machine=fx80-alliant
+ basic_os=
+ ;;
+ altos | altos3068)
+ basic_machine=m68k-altos
+ basic_os=
+ ;;
+ am29k)
+ basic_machine=a29k-none
+ basic_os=bsd
+ ;;
+ amdahl)
+ basic_machine=580-amdahl
+ basic_os=sysv
+ ;;
+ amiga)
+ basic_machine=m68k-unknown
+ basic_os=
+ ;;
+ amigaos | amigados)
+ basic_machine=m68k-unknown
+ basic_os=amigaos
+ ;;
+ amigaunix | amix)
+ basic_machine=m68k-unknown
+ basic_os=sysv4
+ ;;
+ apollo68)
+ basic_machine=m68k-apollo
+ basic_os=sysv
+ ;;
+ apollo68bsd)
+ basic_machine=m68k-apollo
+ basic_os=bsd
+ ;;
+ aros)
+ basic_machine=i386-pc
+ basic_os=aros
+ ;;
+ aux)
+ basic_machine=m68k-apple
+ basic_os=aux
+ ;;
+ balance)
+ basic_machine=ns32k-sequent
+ basic_os=dynix
+ ;;
+ blackfin)
+ basic_machine=bfin-unknown
+ basic_os=linux
+ ;;
+ cegcc)
+ basic_machine=arm-unknown
+ basic_os=cegcc
+ ;;
+ convex-c1)
+ basic_machine=c1-convex
+ basic_os=bsd
+ ;;
+ convex-c2)
+ basic_machine=c2-convex
+ basic_os=bsd
+ ;;
+ convex-c32)
+ basic_machine=c32-convex
+ basic_os=bsd
+ ;;
+ convex-c34)
+ basic_machine=c34-convex
+ basic_os=bsd
+ ;;
+ convex-c38)
+ basic_machine=c38-convex
+ basic_os=bsd
+ ;;
+ cray)
+ basic_machine=j90-cray
+ basic_os=unicos
+ ;;
+ crds | unos)
+ basic_machine=m68k-crds
+ basic_os=
+ ;;
+ da30)
+ basic_machine=m68k-da30
+ basic_os=
+ ;;
+ decstation | pmax | pmin | dec3100 | decstatn)
+ basic_machine=mips-dec
+ basic_os=
+ ;;
+ delta88)
+ basic_machine=m88k-motorola
+ basic_os=sysv3
+ ;;
+ dicos)
+ basic_machine=i686-pc
+ basic_os=dicos
+ ;;
+ djgpp)
+ basic_machine=i586-pc
+ basic_os=msdosdjgpp
+ ;;
+ ebmon29k)
+ basic_machine=a29k-amd
+ basic_os=ebmon
+ ;;
+ es1800 | OSE68k | ose68k | ose | OSE)
+ basic_machine=m68k-ericsson
+ basic_os=ose
+ ;;
+ gmicro)
+ basic_machine=tron-gmicro
+ basic_os=sysv
+ ;;
+ go32)
+ basic_machine=i386-pc
+ basic_os=go32
+ ;;
+ h8300hms)
+ basic_machine=h8300-hitachi
+ basic_os=hms
+ ;;
+ h8300xray)
+ basic_machine=h8300-hitachi
+ basic_os=xray
+ ;;
+ h8500hms)
+ basic_machine=h8500-hitachi
+ basic_os=hms
+ ;;
+ harris)
+ basic_machine=m88k-harris
+ basic_os=sysv3
+ ;;
+ hp300 | hp300hpux)
+ basic_machine=m68k-hp
+ basic_os=hpux
+ ;;
+ hp300bsd)
+ basic_machine=m68k-hp
+ basic_os=bsd
+ ;;
+ hppaosf)
+ basic_machine=hppa1.1-hp
+ basic_os=osf
+ ;;
+ hppro)
+ basic_machine=hppa1.1-hp
+ basic_os=proelf
+ ;;
+ i386mach)
+ basic_machine=i386-mach
+ basic_os=mach
+ ;;
+ isi68 | isi)
+ basic_machine=m68k-isi
+ basic_os=sysv
+ ;;
+ m68knommu)
+ basic_machine=m68k-unknown
+ basic_os=linux
+ ;;
+ magnum | m3230)
+ basic_machine=mips-mips
+ basic_os=sysv
+ ;;
+ merlin)
+ basic_machine=ns32k-utek
+ basic_os=sysv
+ ;;
+ mingw64)
+ basic_machine=x86_64-pc
+ basic_os=mingw64
+ ;;
+ mingw32)
+ basic_machine=i686-pc
+ basic_os=mingw32
+ ;;
+ mingw32ce)
+ basic_machine=arm-unknown
+ basic_os=mingw32ce
+ ;;
+ monitor)
+ basic_machine=m68k-rom68k
+ basic_os=coff
+ ;;
+ morphos)
+ basic_machine=powerpc-unknown
+ basic_os=morphos
+ ;;
+ moxiebox)
+ basic_machine=moxie-unknown
+ basic_os=moxiebox
+ ;;
+ msdos)
+ basic_machine=i386-pc
+ basic_os=msdos
+ ;;
+ msys)
+ basic_machine=i686-pc
+ basic_os=msys
+ ;;
+ mvs)
+ basic_machine=i370-ibm
+ basic_os=mvs
+ ;;
+ nacl)
+ basic_machine=le32-unknown
+ basic_os=nacl
+ ;;
+ ncr3000)
+ basic_machine=i486-ncr
+ basic_os=sysv4
+ ;;
+ netbsd386)
+ basic_machine=i386-pc
+ basic_os=netbsd
+ ;;
+ netwinder)
+ basic_machine=armv4l-rebel
+ basic_os=linux
+ ;;
+ news | news700 | news800 | news900)
+ basic_machine=m68k-sony
+ basic_os=newsos
+ ;;
+ news1000)
+ basic_machine=m68030-sony
+ basic_os=newsos
+ ;;
+ necv70)
+ basic_machine=v70-nec
+ basic_os=sysv
+ ;;
+ nh3000)
+ basic_machine=m68k-harris
+ basic_os=cxux
+ ;;
+ nh[45]000)
+ basic_machine=m88k-harris
+ basic_os=cxux
+ ;;
+ nindy960)
+ basic_machine=i960-intel
+ basic_os=nindy
+ ;;
+ mon960)
+ basic_machine=i960-intel
+ basic_os=mon960
+ ;;
+ nonstopux)
+ basic_machine=mips-compaq
+ basic_os=nonstopux
+ ;;
+ os400)
+ basic_machine=powerpc-ibm
+ basic_os=os400
+ ;;
+ OSE68000 | ose68000)
+ basic_machine=m68000-ericsson
+ basic_os=ose
+ ;;
+ os68k)
+ basic_machine=m68k-none
+ basic_os=os68k
+ ;;
+ paragon)
+ basic_machine=i860-intel
+ basic_os=osf
+ ;;
+ parisc)
+ basic_machine=hppa-unknown
+ basic_os=linux
+ ;;
+ psp)
+ basic_machine=mipsallegrexel-sony
+ basic_os=psp
+ ;;
+ pw32)
+ basic_machine=i586-unknown
+ basic_os=pw32
+ ;;
+ rdos | rdos64)
+ basic_machine=x86_64-pc
+ basic_os=rdos
+ ;;
+ rdos32)
+ basic_machine=i386-pc
+ basic_os=rdos
+ ;;
+ rom68k)
+ basic_machine=m68k-rom68k
+ basic_os=coff
+ ;;
+ sa29200)
+ basic_machine=a29k-amd
+ basic_os=udi
+ ;;
+ sei)
+ basic_machine=mips-sei
+ basic_os=seiux
+ ;;
+ sequent)
+ basic_machine=i386-sequent
+ basic_os=
+ ;;
+ sps7)
+ basic_machine=m68k-bull
+ basic_os=sysv2
+ ;;
+ st2000)
+ basic_machine=m68k-tandem
+ basic_os=
+ ;;
+ stratus)
+ basic_machine=i860-stratus
+ basic_os=sysv4
+ ;;
+ sun2)
+ basic_machine=m68000-sun
+ basic_os=
+ ;;
+ sun2os3)
+ basic_machine=m68000-sun
+ basic_os=sunos3
+ ;;
+ sun2os4)
+ basic_machine=m68000-sun
+ basic_os=sunos4
+ ;;
+ sun3)
+ basic_machine=m68k-sun
+ basic_os=
+ ;;
+ sun3os3)
+ basic_machine=m68k-sun
+ basic_os=sunos3
+ ;;
+ sun3os4)
+ basic_machine=m68k-sun
+ basic_os=sunos4
+ ;;
+ sun4)
+ basic_machine=sparc-sun
+ basic_os=
+ ;;
+ sun4os3)
+ basic_machine=sparc-sun
+ basic_os=sunos3
+ ;;
+ sun4os4)
+ basic_machine=sparc-sun
+ basic_os=sunos4
+ ;;
+ sun4sol2)
+ basic_machine=sparc-sun
+ basic_os=solaris2
+ ;;
+ sun386 | sun386i | roadrunner)
+ basic_machine=i386-sun
+ basic_os=
+ ;;
+ sv1)
+ basic_machine=sv1-cray
+ basic_os=unicos
+ ;;
+ symmetry)
+ basic_machine=i386-sequent
+ basic_os=dynix
+ ;;
+ t3e)
+ basic_machine=alphaev5-cray
+ basic_os=unicos
+ ;;
+ t90)
+ basic_machine=t90-cray
+ basic_os=unicos
+ ;;
+ toad1)
+ basic_machine=pdp10-xkl
+ basic_os=tops20
+ ;;
+ tpf)
+ basic_machine=s390x-ibm
+ basic_os=tpf
+ ;;
+ udi29k)
+ basic_machine=a29k-amd
+ basic_os=udi
+ ;;
+ ultra3)
+ basic_machine=a29k-nyu
+ basic_os=sym1
+ ;;
+ v810 | necv810)
+ basic_machine=v810-nec
+ basic_os=none
+ ;;
+ vaxv)
+ basic_machine=vax-dec
+ basic_os=sysv
+ ;;
+ vms)
+ basic_machine=vax-dec
+ basic_os=vms
+ ;;
+ vsta)
+ basic_machine=i386-pc
+ basic_os=vsta
+ ;;
+ vxworks960)
+ basic_machine=i960-wrs
+ basic_os=vxworks
+ ;;
+ vxworks68)
+ basic_machine=m68k-wrs
+ basic_os=vxworks
+ ;;
+ vxworks29k)
+ basic_machine=a29k-wrs
+ basic_os=vxworks
+ ;;
+ xbox)
+ basic_machine=i686-pc
+ basic_os=mingw32
+ ;;
+ ymp)
+ basic_machine=ymp-cray
+ basic_os=unicos
+ ;;
+ *)
+ basic_machine=$1
+ basic_os=
+ ;;
+ esac
+ ;;
+esac
+
+# Decode 1-component or ad-hoc basic machines
+case $basic_machine in
+ # Here we handle the default manufacturer of certain CPU types. It is in
+ # some cases the only manufacturer, in others, it is the most popular.
+ w89k)
+ cpu=hppa1.1
+ vendor=winbond
+ ;;
+ op50n)
+ cpu=hppa1.1
+ vendor=oki
+ ;;
+ op60c)
+ cpu=hppa1.1
+ vendor=oki
+ ;;
+ ibm*)
+ cpu=i370
+ vendor=ibm
+ ;;
+ orion105)
+ cpu=clipper
+ vendor=highlevel
+ ;;
+ mac | mpw | mac-mpw)
+ cpu=m68k
+ vendor=apple
+ ;;
+ pmac | pmac-mpw)
+ cpu=powerpc
+ vendor=apple
+ ;;
+
+ # Recognize the various machine names and aliases which stand
+ # for a CPU type and a company and sometimes even an OS.
+ 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+ cpu=m68000
+ vendor=att
+ ;;
+ 3b*)
+ cpu=we32k
+ vendor=att
+ ;;
+ bluegene*)
+ cpu=powerpc
+ vendor=ibm
+ basic_os=cnk
+ ;;
+ decsystem10* | dec10*)
+ cpu=pdp10
+ vendor=dec
+ basic_os=tops10
+ ;;
+ decsystem20* | dec20*)
+ cpu=pdp10
+ vendor=dec
+ basic_os=tops20
+ ;;
+ delta | 3300 | motorola-3300 | motorola-delta \
+ | 3300-motorola | delta-motorola)
+ cpu=m68k
+ vendor=motorola
+ ;;
+ dpx2*)
+ cpu=m68k
+ vendor=bull
+ basic_os=sysv3
+ ;;
+ encore | umax | mmax)
+ cpu=ns32k
+ vendor=encore
+ ;;
+ elxsi)
+ cpu=elxsi
+ vendor=elxsi
+ basic_os=${basic_os:-bsd}
+ ;;
+ fx2800)
+ cpu=i860
+ vendor=alliant
+ ;;
+ genix)
+ cpu=ns32k
+ vendor=ns
+ ;;
+ h3050r* | hiux*)
+ cpu=hppa1.1
+ vendor=hitachi
+ basic_os=hiuxwe2
+ ;;
+ hp3k9[0-9][0-9] | hp9[0-9][0-9])
+ cpu=hppa1.0
+ vendor=hp
+ ;;
+ hp9k2[0-9][0-9] | hp9k31[0-9])
+ cpu=m68000
+ vendor=hp
+ ;;
+ hp9k3[2-9][0-9])
+ cpu=m68k
+ vendor=hp
+ ;;
+ hp9k6[0-9][0-9] | hp6[0-9][0-9])
+ cpu=hppa1.0
+ vendor=hp
+ ;;
+ hp9k7[0-79][0-9] | hp7[0-79][0-9])
+ cpu=hppa1.1
+ vendor=hp
+ ;;
+ hp9k78[0-9] | hp78[0-9])
+ # FIXME: really hppa2.0-hp
+ cpu=hppa1.1
+ vendor=hp
+ ;;
+ hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+ # FIXME: really hppa2.0-hp
+ cpu=hppa1.1
+ vendor=hp
+ ;;
+ hp9k8[0-9][13679] | hp8[0-9][13679])
+ cpu=hppa1.1
+ vendor=hp
+ ;;
+ hp9k8[0-9][0-9] | hp8[0-9][0-9])
+ cpu=hppa1.0
+ vendor=hp
+ ;;
+ i*86v32)
+ cpu=`echo "$1" | sed -e 's/86.*/86/'`
+ vendor=pc
+ basic_os=sysv32
+ ;;
+ i*86v4*)
+ cpu=`echo "$1" | sed -e 's/86.*/86/'`
+ vendor=pc
+ basic_os=sysv4
+ ;;
+ i*86v)
+ cpu=`echo "$1" | sed -e 's/86.*/86/'`
+ vendor=pc
+ basic_os=sysv
+ ;;
+ i*86sol2)
+ cpu=`echo "$1" | sed -e 's/86.*/86/'`
+ vendor=pc
+ basic_os=solaris2
+ ;;
+ j90 | j90-cray)
+ cpu=j90
+ vendor=cray
+ basic_os=${basic_os:-unicos}
+ ;;
+ iris | iris4d)
+ cpu=mips
+ vendor=sgi
+ case $basic_os in
+ irix*)
+ ;;
+ *)
+ basic_os=irix4
+ ;;
+ esac
+ ;;
+ miniframe)
+ cpu=m68000
+ vendor=convergent
+ ;;
+ *mint | mint[0-9]* | *MiNT | *MiNT[0-9]*)
+ cpu=m68k
+ vendor=atari
+ basic_os=mint
+ ;;
+ news-3600 | risc-news)
+ cpu=mips
+ vendor=sony
+ basic_os=newsos
+ ;;
+ next | m*-next)
+ cpu=m68k
+ vendor=next
+ case $basic_os in
+ openstep*)
+ ;;
+ nextstep*)
+ ;;
+ ns2*)
+ basic_os=nextstep2
+ ;;
+ *)
+ basic_os=nextstep3
+ ;;
+ esac
+ ;;
+ np1)
+ cpu=np1
+ vendor=gould
+ ;;
+ op50n-* | op60c-*)
+ cpu=hppa1.1
+ vendor=oki
+ basic_os=proelf
+ ;;
+ pa-hitachi)
+ cpu=hppa1.1
+ vendor=hitachi
+ basic_os=hiuxwe2
+ ;;
+ pbd)
+ cpu=sparc
+ vendor=tti
+ ;;
+ pbb)
+ cpu=m68k
+ vendor=tti
+ ;;
+ pc532)
+ cpu=ns32k
+ vendor=pc532
+ ;;
+ pn)
+ cpu=pn
+ vendor=gould
+ ;;
+ power)
+ cpu=power
+ vendor=ibm
+ ;;
+ ps2)
+ cpu=i386
+ vendor=ibm
+ ;;
+ rm[46]00)
+ cpu=mips
+ vendor=siemens
+ ;;
+ rtpc | rtpc-*)
+ cpu=romp
+ vendor=ibm
+ ;;
+ sde)
+ cpu=mipsisa32
+ vendor=sde
+ basic_os=${basic_os:-elf}
+ ;;
+ simso-wrs)
+ cpu=sparclite
+ vendor=wrs
+ basic_os=vxworks
+ ;;
+ tower | tower-32)
+ cpu=m68k
+ vendor=ncr
+ ;;
+ vpp*|vx|vx-*)
+ cpu=f301
+ vendor=fujitsu
+ ;;
+ w65)
+ cpu=w65
+ vendor=wdc
+ ;;
+ w89k-*)
+ cpu=hppa1.1
+ vendor=winbond
+ basic_os=proelf
+ ;;
+ none)
+ cpu=none
+ vendor=none
+ ;;
+ leon|leon[3-9])
+ cpu=sparc
+ vendor=$basic_machine
+ ;;
+ leon-*|leon[3-9]-*)
+ cpu=sparc
+ vendor=`echo "$basic_machine" | sed 's/-.*//'`
+ ;;
+
+ *-*)
+ # shellcheck disable=SC2162
+ saved_IFS=$IFS
+ IFS="-" read cpu vendor <<EOF
+$basic_machine
+EOF
+ IFS=$saved_IFS
+ ;;
+ # We use 'pc' rather than 'unknown'
+ # because (1) that's what they normally are, and
+ # (2) the word "unknown" tends to confuse beginning users.
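+ # For example, a bare 'x86_64' is treated as x86_64-pc here rather than
+ # x86_64-unknown.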
+ i*86 | x86_64)
+ cpu=$basic_machine
+ vendor=pc
+ ;;
+ # These rules are duplicated from below for the sake of the special case above;
+ # i.e. things that normalize to x86 arches should also default to "pc"
+ pc98)
+ cpu=i386
+ vendor=pc
+ ;;
+ x64 | amd64)
+ cpu=x86_64
+ vendor=pc
+ ;;
+ # Recognize the basic CPU types without company name.
+ *)
+ cpu=$basic_machine
+ vendor=unknown
+ ;;
+esac
+
+unset -v basic_machine
+
+# Decode basic machines in the full and proper CPU-Company form.
+case $cpu-$vendor in
+ # Here we handle the default manufacturer of certain CPU types in canonical form. It is in
+ # some cases the only manufacturer, in others, it is the most popular.
+ craynv-unknown)
+ vendor=cray
+ basic_os=${basic_os:-unicosmp}
+ ;;
+ c90-unknown | c90-cray)
+ vendor=cray
+ basic_os=${basic_os:-unicos}
+ ;;
+ fx80-unknown)
+ vendor=alliant
+ ;;
+ romp-unknown)
+ vendor=ibm
+ ;;
+ mmix-unknown)
+ vendor=knuth
+ ;;
+ microblaze-unknown | microblazeel-unknown)
+ vendor=xilinx
+ ;;
+ rs6000-unknown)
+ vendor=ibm
+ ;;
+ vax-unknown)
+ vendor=dec
+ ;;
+ pdp11-unknown)
+ vendor=dec
+ ;;
+ we32k-unknown)
+ vendor=att
+ ;;
+ cydra-unknown)
+ vendor=cydrome
+ ;;
+ i370-ibm*)
+ vendor=ibm
+ ;;
+ orion-unknown)
+ vendor=highlevel
+ ;;
+ xps-unknown | xps100-unknown)
+ cpu=xps100
+ vendor=honeywell
+ ;;
+
+ # Here we normalize CPU types with a missing or matching vendor
+ armh-unknown | armh-alt)
+ cpu=armv7l
+ vendor=alt
+ basic_os=${basic_os:-linux-gnueabihf}
+ ;;
+ dpx20-unknown | dpx20-bull)
+ cpu=rs6000
+ vendor=bull
+ basic_os=${basic_os:-bosx}
+ ;;
+
+ # Here we normalize CPU types irrespective of the vendor
+ amd64-*)
+ cpu=x86_64
+ ;;
+ blackfin-*)
+ cpu=bfin
+ basic_os=linux
+ ;;
+ c54x-*)
+ cpu=tic54x
+ ;;
+ c55x-*)
+ cpu=tic55x
+ ;;
+ c6x-*)
+ cpu=tic6x
+ ;;
+ e500v[12]-*)
+ cpu=powerpc
+ basic_os=${basic_os}"spe"
+ ;;
+ mips3*-*)
+ cpu=mips64
+ ;;
+ ms1-*)
+ cpu=mt
+ ;;
+ m68knommu-*)
+ cpu=m68k
+ basic_os=linux
+ ;;
+ m9s12z-* | m68hcs12z-* | hcs12z-* | s12z-*)
+ cpu=s12z
+ ;;
+ openrisc-*)
+ cpu=or32
+ ;;
+ parisc-*)
+ cpu=hppa
+ basic_os=linux
+ ;;
+ pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+ cpu=i586
+ ;;
+ pentiumpro-* | p6-* | 6x86-* | athlon-* | athlon_*-*)
+ cpu=i686
+ ;;
+ pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+ cpu=i686
+ ;;
+ pentium4-*)
+ cpu=i786
+ ;;
+ pc98-*)
+ cpu=i386
+ ;;
+ ppc-* | ppcbe-*)
+ cpu=powerpc
+ ;;
+ ppcle-* | powerpclittle-*)
+ cpu=powerpcle
+ ;;
+ ppc64-*)
+ cpu=powerpc64
+ ;;
+ ppc64le-* | powerpc64little-*)
+ cpu=powerpc64le
+ ;;
+ sb1-*)
+ cpu=mipsisa64sb1
+ ;;
+ sb1el-*)
+ cpu=mipsisa64sb1el
+ ;;
+ sh5e[lb]-*)
+ cpu=`echo "$cpu" | sed 's/^\(sh.\)e\(.\)$/\1\2e/'`
+ ;;
+ spur-*)
+ cpu=spur
+ ;;
+ strongarm-* | thumb-*)
+ cpu=arm
+ ;;
+ tx39-*)
+ cpu=mipstx39
+ ;;
+ tx39el-*)
+ cpu=mipstx39el
+ ;;
+ x64-*)
+ cpu=x86_64
+ ;;
+ xscale-* | xscalee[bl]-*)
+ cpu=`echo "$cpu" | sed 's/^xscale/arm/'`
+ ;;
+ arm64-* | aarch64le-*)
+ cpu=aarch64
+ ;;
+
+ # Recognize the canonical CPU Types that limit and/or modify the
+ # company names they are paired with.
+ cr16-*)
+ basic_os=${basic_os:-elf}
+ ;;
+ crisv32-* | etraxfs*-*)
+ cpu=crisv32
+ vendor=axis
+ ;;
+ cris-* | etrax*-*)
+ cpu=cris
+ vendor=axis
+ ;;
+ crx-*)
+ basic_os=${basic_os:-elf}
+ ;;
+ neo-tandem)
+ cpu=neo
+ vendor=tandem
+ ;;
+ nse-tandem)
+ cpu=nse
+ vendor=tandem
+ ;;
+ nsr-tandem)
+ cpu=nsr
+ vendor=tandem
+ ;;
+ nsv-tandem)
+ cpu=nsv
+ vendor=tandem
+ ;;
+ nsx-tandem)
+ cpu=nsx
+ vendor=tandem
+ ;;
+ mipsallegrexel-sony)
+ cpu=mipsallegrexel
+ vendor=sony
+ ;;
+ tile*-*)
+ basic_os=${basic_os:-linux-gnu}
+ ;;
+
+ *)
+ # Recognize the canonical CPU types that are allowed with any
+ # company name.
+ case $cpu in
+ 1750a | 580 \
+ | a29k \
+ | aarch64 | aarch64_be | aarch64c | arm64ec \
+ | abacus \
+ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] \
+ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] \
+ | alphapca5[67] | alpha64pca5[67] \
+ | am33_2.0 \
+ | amdgcn \
+ | arc | arceb | arc32 | arc64 \
+ | arm | arm[lb]e | arme[lb] | armv* \
+ | avr | avr32 \
+ | asmjs \
+ | ba \
+ | be32 | be64 \
+ | bfin | bpf | bs2000 \
+ | c[123]* | c30 | [cjt]90 | c4x \
+ | c8051 | clipper | craynv | csky | cydra \
+ | d10v | d30v | dlx | dsp16xx \
+ | e2k | elxsi | epiphany \
+ | f30[01] | f700 | fido | fr30 | frv | ft32 | fx80 \
+ | javascript \
+ | h8300 | h8500 \
+ | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+ | hexagon \
+ | i370 | i*86 | i860 | i960 | ia16 | ia64 \
+ | ip2k | iq2000 \
+ | k1om \
+ | kvx \
+ | le32 | le64 \
+ | lm32 \
+ | loongarch32 | loongarch64 \
+ | m32c | m32r | m32rle \
+ | m5200 | m68000 | m680[012346]0 | m68360 | m683?2 | m68k \
+ | m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x \
+ | m88110 | m88k | maxq | mb | mcore | mep | metag \
+ | microblaze | microblazeel \
+ | mips* \
+ | mmix \
+ | mn10200 | mn10300 \
+ | moxie \
+ | mt \
+ | msp430 \
+ | nds32 | nds32le | nds32be \
+ | nfp \
+ | nios | nios2 | nios2eb | nios2el \
+ | none | np1 | ns16k | ns32k | nvptx \
+ | open8 \
+ | or1k* \
+ | or32 \
+ | orion \
+ | picochip \
+ | pdp10 | pdp11 | pj | pjl | pn | power \
+ | powerpc | powerpc64 | powerpc64le | powerpcle | powerpcspe \
+ | pru \
+ | pyramid \
+ | riscv | riscv32 | riscv32be | riscv64 | riscv64be \
+ | rl78 | romp | rs6000 | rx \
+ | s390 | s390x \
+ | score \
+ | sh | shl \
+ | sh[1234] | sh[24]a | sh[24]ae[lb] | sh[23]e | she[lb] | sh[lb]e \
+ | sh[1234]e[lb] | sh[12345][lb]e | sh[23]ele | sh64 | sh64le \
+ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet \
+ | sparclite \
+ | sparcv8 | sparcv9 | sparcv9b | sparcv9v | sv1 | sx* \
+ | spu \
+ | tahoe \
+ | thumbv7* \
+ | tic30 | tic4x | tic54x | tic55x | tic6x | tic80 \
+ | tron \
+ | ubicom32 \
+ | v70 | v850 | v850e | v850e1 | v850es | v850e2 | v850e2v3 \
+ | vax \
+ | visium \
+ | w65 \
+ | wasm32 | wasm64 \
+ | we32k \
+ | x86 | x86_64 | xc16x | xgate | xps100 \
+ | xstormy16 | xtensa* \
+ | ymp \
+ | z8k | z80)
+ ;;
+
+ *)
+ echo "Invalid configuration '$1': machine '$cpu-$vendor' not recognized" 1>&2
+ exit 1
+ ;;
+ esac
+ ;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $vendor in
+ digital*)
+ vendor=dec
+ ;;
+ commodore*)
+ vendor=cbm
+ ;;
+ *)
+ ;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if test x"$basic_os" != x
+then
+
+# First recognize some ad-hoc cases, or perhaps split kernel-os, or else just
+# set os.
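+# For example, 'linux-musl' splits into kernel=linux and os=musl below, while
+# a plain 'solaris2.11' leaves the kernel empty and keeps the whole string as
+# the os.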
+obj=
+case $basic_os in
+ gnu/linux*)
+ kernel=linux
+ os=`echo "$basic_os" | sed -e 's|gnu/linux|gnu|'`
+ ;;
+ os2-emx)
+ kernel=os2
+ os=`echo "$basic_os" | sed -e 's|os2-emx|emx|'`
+ ;;
+ nto-qnx*)
+ kernel=nto
+ os=`echo "$basic_os" | sed -e 's|nto-qnx|qnx|'`
+ ;;
+ *-*)
+ # shellcheck disable=SC2162
+ saved_IFS=$IFS
+ IFS="-" read kernel os <<EOF
+$basic_os
+EOF
+ IFS=$saved_IFS
+ ;;
+ # Default OS when just kernel was specified
+ nto*)
+ kernel=nto
+ os=`echo "$basic_os" | sed -e 's|nto|qnx|'`
+ ;;
+ linux*)
+ kernel=linux
+ os=`echo "$basic_os" | sed -e 's|linux|gnu|'`
+ ;;
+ managarm*)
+ kernel=managarm
+ os=`echo "$basic_os" | sed -e 's|managarm|mlibc|'`
+ ;;
+ *)
+ kernel=
+ os=$basic_os
+ ;;
+esac
+
+# Now, normalize the OS (knowing we just have one component, it's not a kernel,
+# etc.)
+case $os in
+ # First match some system type aliases that might get confused
+ # with valid system types.
+ # solaris* is a basic system type, with this one exception.
+ auroraux)
+ os=auroraux
+ ;;
+ bluegene*)
+ os=cnk
+ ;;
+ solaris1 | solaris1.*)
+ os=`echo "$os" | sed -e 's|solaris1|sunos4|'`
+ ;;
+ solaris)
+ os=solaris2
+ ;;
+ unixware*)
+ os=sysv4.2uw
+ ;;
+ # es1800 is here to avoid being matched by es* (a different OS)
+ es1800*)
+ os=ose
+ ;;
+ # Some version numbers need modification
+ chorusos*)
+ os=chorusos
+ ;;
+ isc)
+ os=isc2.2
+ ;;
+ sco6)
+ os=sco5v6
+ ;;
+ sco5)
+ os=sco3.2v5
+ ;;
+ sco4)
+ os=sco3.2v4
+ ;;
+ sco3.2.[4-9]*)
+ os=`echo "$os" | sed -e 's/sco3.2./sco3.2v/'`
+ ;;
+ sco*v* | scout)
+ # Don't match below
+ ;;
+ sco*)
+ os=sco3.2v2
+ ;;
+ psos*)
+ os=psos
+ ;;
+ qnx*)
+ os=qnx
+ ;;
+ hiux*)
+ os=hiuxwe2
+ ;;
+ lynx*178)
+ os=lynxos178
+ ;;
+ lynx*5)
+ os=lynxos5
+ ;;
+ lynxos*)
+ # don't get caught up in next wildcard
+ ;;
+ lynx*)
+ os=lynxos
+ ;;
+ mac[0-9]*)
+ os=`echo "$os" | sed -e 's|mac|macos|'`
+ ;;
+ opened*)
+ os=openedition
+ ;;
+ os400*)
+ os=os400
+ ;;
+ sunos5*)
+ os=`echo "$os" | sed -e 's|sunos5|solaris2|'`
+ ;;
+ sunos6*)
+ os=`echo "$os" | sed -e 's|sunos6|solaris3|'`
+ ;;
+ wince*)
+ os=wince
+ ;;
+ utek*)
+ os=bsd
+ ;;
+ dynix*)
+ os=bsd
+ ;;
+ acis*)
+ os=aos
+ ;;
+ atheos*)
+ os=atheos
+ ;;
+ syllable*)
+ os=syllable
+ ;;
+ 386bsd)
+ os=bsd
+ ;;
+ ctix* | uts*)
+ os=sysv
+ ;;
+ nova*)
+ os=rtmk-nova
+ ;;
+ ns2)
+ os=nextstep2
+ ;;
+ # Preserve the version number of sinix5.
+ sinix5.*)
+ os=`echo "$os" | sed -e 's|sinix|sysv|'`
+ ;;
+ sinix*)
+ os=sysv4
+ ;;
+ tpf*)
+ os=tpf
+ ;;
+ triton*)
+ os=sysv3
+ ;;
+ oss*)
+ os=sysv3
+ ;;
+ svr4*)
+ os=sysv4
+ ;;
+ svr3)
+ os=sysv3
+ ;;
+ sysvr4)
+ os=sysv4
+ ;;
+ ose*)
+ os=ose
+ ;;
+ *mint | mint[0-9]* | *MiNT | MiNT[0-9]*)
+ os=mint
+ ;;
+ dicos*)
+ os=dicos
+ ;;
+ pikeos*)
+ # Until real need of OS specific support for
+ # particular features comes up, bare metal
+ # configurations are quite functional.
+ case $cpu in
+ arm*)
+ os=eabi
+ ;;
+ *)
+ os=
+ obj=elf
+ ;;
+ esac
+ ;;
+ aout* | coff* | elf* | pe*)
+ # These are machine code file formats, not OSes
+ obj=$os
+ os=
+ ;;
+ *)
+ # No normalization here; whether the OS is accepted is checked below.
+ ;;
+esac
+
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine or put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system. Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+kernel=
+obj=
+case $cpu-$vendor in
+ score-*)
+ os=
+ obj=elf
+ ;;
+ spu-*)
+ os=
+ obj=elf
+ ;;
+ *-acorn)
+ os=riscix1.2
+ ;;
+ arm*-rebel)
+ kernel=linux
+ os=gnu
+ ;;
+ arm*-semi)
+ os=
+ obj=aout
+ ;;
+ c4x-* | tic4x-*)
+ os=
+ obj=coff
+ ;;
+ c8051-*)
+ os=
+ obj=elf
+ ;;
+ clipper-intergraph)
+ os=clix
+ ;;
+ hexagon-*)
+ os=
+ obj=elf
+ ;;
+ tic54x-*)
+ os=
+ obj=coff
+ ;;
+ tic55x-*)
+ os=
+ obj=coff
+ ;;
+ tic6x-*)
+ os=
+ obj=coff
+ ;;
+ # This must come before the *-dec entry.
+ pdp10-*)
+ os=tops20
+ ;;
+ pdp11-*)
+ os=none
+ ;;
+ *-dec | vax-*)
+ os=ultrix4.2
+ ;;
+ m68*-apollo)
+ os=domain
+ ;;
+ i386-sun)
+ os=sunos4.0.2
+ ;;
+ m68000-sun)
+ os=sunos3
+ ;;
+ m68*-cisco)
+ os=
+ obj=aout
+ ;;
+ mep-*)
+ os=
+ obj=elf
+ ;;
+ mips*-cisco)
+ os=
+ obj=elf
+ ;;
+ mips*-*)
+ os=
+ obj=elf
+ ;;
+ or32-*)
+ os=
+ obj=coff
+ ;;
+ *-tti) # must be before sparc entry or we get the wrong os.
+ os=sysv3
+ ;;
+ sparc-* | *-sun)
+ os=sunos4.1.1
+ ;;
+ pru-*)
+ os=
+ obj=elf
+ ;;
+ *-be)
+ os=beos
+ ;;
+ *-ibm)
+ os=aix
+ ;;
+ *-knuth)
+ os=mmixware
+ ;;
+ *-wec)
+ os=proelf
+ ;;
+ *-winbond)
+ os=proelf
+ ;;
+ *-oki)
+ os=proelf
+ ;;
+ *-hp)
+ os=hpux
+ ;;
+ *-hitachi)
+ os=hiux
+ ;;
+ i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+ os=sysv
+ ;;
+ *-cbm)
+ os=amigaos
+ ;;
+ *-dg)
+ os=dgux
+ ;;
+ *-dolphin)
+ os=sysv3
+ ;;
+ m68k-ccur)
+ os=rtu
+ ;;
+ m88k-omron*)
+ os=luna
+ ;;
+ *-next)
+ os=nextstep
+ ;;
+ *-sequent)
+ os=ptx
+ ;;
+ *-crds)
+ os=unos
+ ;;
+ *-ns)
+ os=genix
+ ;;
+ i370-*)
+ os=mvs
+ ;;
+ *-gould)
+ os=sysv
+ ;;
+ *-highlevel)
+ os=bsd
+ ;;
+ *-encore)
+ os=bsd
+ ;;
+ *-sgi)
+ os=irix
+ ;;
+ *-siemens)
+ os=sysv4
+ ;;
+ *-masscomp)
+ os=rtu
+ ;;
+ f30[01]-fujitsu | f700-fujitsu)
+ os=uxpv
+ ;;
+ *-rom68k)
+ os=
+ obj=coff
+ ;;
+ *-*bug)
+ os=
+ obj=coff
+ ;;
+ *-apple)
+ os=macos
+ ;;
+ *-atari*)
+ os=mint
+ ;;
+ *-wrs)
+ os=vxworks
+ ;;
+ *)
+ os=none
+ ;;
+esac
+
+fi
+
+# Now, validate our (potentially fixed-up) individual pieces (OS, OBJ).
+
+case $os in
+ # Sometimes we do "kernel-libc", so those need to count as OSes.
+ musl* | newlib* | relibc* | uclibc*)
+ ;;
+ # Likewise for "kernel-abi"
+ eabi* | gnueabi*)
+ ;;
+ # VxWorks passes extra cpu info in the 4th field.
+ simlinux | simwindows | spe)
+ ;;
+ # See `case $cpu-$os` validation below
+ ghcjs)
+ ;;
+ # Now accept the basic system types.
+ # The portable systems come first.
+ # Each alternative MUST end in a * to match a version number.
+ gnu* | android* | bsd* | mach* | minix* | genix* | ultrix* | irix* \
+ | *vms* | esix* | aix* | cnk* | sunos | sunos[34]* \
+ | hpux* | unos* | osf* | luna* | dgux* | auroraux* | solaris* \
+ | sym* | plan9* | psp* | sim* | xray* | os68k* | v88r* \
+ | hiux* | abug | nacl* | netware* | windows* \
+ | os9* | macos* | osx* | ios* | tvos* | watchos* \
+ | mpw* | magic* | mmixware* | mon960* | lnews* \
+ | amigaos* | amigados* | msdos* | newsos* | unicos* | aof* \
+ | aos* | aros* | cloudabi* | sortix* | twizzler* \
+ | nindy* | vxsim* | vxworks* | ebmon* | hms* | mvs* \
+ | clix* | riscos* | uniplus* | iris* | isc* | rtu* | xenix* \
+ | mirbsd* | netbsd* | dicos* | openedition* | ose* \
+ | bitrig* | openbsd* | secbsd* | solidbsd* | libertybsd* | os108* \
+ | ekkobsd* | freebsd* | riscix* | lynxos* | os400* \
+ | bosx* | nextstep* | cxux* | oabi* \
+ | ptx* | ecoff* | winnt* | domain* | vsta* \
+ | udi* | lites* | ieee* | go32* | aux* | hcos* \
+ | chorusrdb* | cegcc* | glidix* | serenity* \
+ | cygwin* | msys* | moss* | proelf* | rtems* \
+ | midipix* | mingw32* | mingw64* | mint* \
+ | uxpv* | beos* | mpeix* | udk* | moxiebox* \
+ | interix* | uwin* | mks* | rhapsody* | darwin* \
+ | openstep* | oskit* | conix* | pw32* | nonstopux* \
+ | storm-chaos* | tops10* | tenex* | tops20* | its* \
+ | os2* | vos* | palmos* | uclinux* | nucleus* | morphos* \
+ | scout* | superux* | sysv* | rtmk* | tpf* | windiss* \
+ | powermax* | dnix* | nx6 | nx7 | sei* | dragonfly* \
+ | skyos* | haiku* | rdos* | toppers* | drops* | es* \
+ | onefs* | tirtos* | phoenix* | fuchsia* | redox* | bme* \
+ | midnightbsd* | amdhsa* | unleashed* | emscripten* | wasi* \
+ | nsk* | powerunix* | genode* | zvmoe* | qnx* | emx* | zephyr* \
+ | fiwix* | mlibc* | cos* | mbr* )
+ ;;
+ # This one is extra strict with allowed versions
+ sco3.2v2 | sco3.2v[4-9]* | sco5v6*)
+ # Don't forget version if it is 3.2v4 or newer.
+ ;;
+ none)
+ ;;
+ kernel* | msvc* )
+ # Restricted further below
+ ;;
+ '')
+ if test x"$obj" = x
+ then
+      echo "Invalid configuration '$1': Blank OS only allowed with explicit machine code file format" 1>&2
+      exit 1
+    fi
+ ;;
+ *)
+ echo "Invalid configuration '$1': OS '$os' not recognized" 1>&2
+ exit 1
+ ;;
+esac
+
+case $obj in
+ aout* | coff* | elf* | pe*)
+ ;;
+ '')
+ # empty is fine
+ ;;
+ *)
+ echo "Invalid configuration '$1': Machine code format '$obj' not recognized" 1>&2
+ exit 1
+ ;;
+esac
+
+# Here we handle the constraint that a (synthetic) cpu and os are
+# valid only in combination with each other and nowhere else.
+case $cpu-$os in
+ # The "javascript-unknown-ghcjs" triple is used by GHC; we
+ # accept it here in order to tolerate that, but reject any
+ # variations.
+ javascript-ghcjs)
+ ;;
+ javascript-* | *-ghcjs)
+ echo "Invalid configuration '$1': cpu '$cpu' is not valid with os '$os$obj'" 1>&2
+ exit 1
+ ;;
+esac
+
+# As a final step for OS-related things, validate the OS-kernel combination
+# (given a valid OS), if there is a kernel.
+case $kernel-$os-$obj in
+ linux-gnu*- | linux-dietlibc*- | linux-android*- | linux-newlib*- \
+ | linux-musl*- | linux-relibc*- | linux-uclibc*- | linux-mlibc*- )
+ ;;
+ uclinux-uclibc*- )
+ ;;
+ managarm-mlibc*- | managarm-kernel*- )
+ ;;
+ windows*-msvc*-)
+ ;;
+ -dietlibc*- | -newlib*- | -musl*- | -relibc*- | -uclibc*- | -mlibc*- )
+ # These are just libc implementations, not actual OSes, and thus
+ # require a kernel.
+ echo "Invalid configuration '$1': libc '$os' needs explicit kernel." 1>&2
+ exit 1
+ ;;
+ -kernel*- )
+ echo "Invalid configuration '$1': '$os' needs explicit kernel." 1>&2
+ exit 1
+ ;;
+ *-kernel*- )
+ echo "Invalid configuration '$1': '$kernel' does not support '$os'." 1>&2
+ exit 1
+ ;;
+ *-msvc*- )
+ echo "Invalid configuration '$1': '$os' needs 'windows'." 1>&2
+ exit 1
+ ;;
+ kfreebsd*-gnu*- | kopensolaris*-gnu*-)
+ ;;
+ vxworks-simlinux- | vxworks-simwindows- | vxworks-spe-)
+ ;;
+ nto-qnx*-)
+ ;;
+ os2-emx-)
+ ;;
+ *-eabi*- | *-gnueabi*-)
+ ;;
+ none--*)
+ # None (no kernel, i.e. freestanding / bare metal),
+    # can be paired with a machine code file format
+ ;;
+ -*-)
+ # Blank kernel with real OS is always fine.
+ ;;
+ --*)
+ # Blank kernel and OS with real machine code file format is always fine.
+ ;;
+ *-*-*)
+ echo "Invalid configuration '$1': Kernel '$kernel' not known to work with OS '$os'." 1>&2
+ exit 1
+ ;;
+esac
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer. We pick the logical manufacturer.
+case $vendor in
+ unknown)
+ case $cpu-$os in
+ *-riscix*)
+ vendor=acorn
+ ;;
+ *-sunos*)
+ vendor=sun
+ ;;
+ *-cnk* | *-aix*)
+ vendor=ibm
+ ;;
+ *-beos*)
+ vendor=be
+ ;;
+ *-hpux*)
+ vendor=hp
+ ;;
+ *-mpeix*)
+ vendor=hp
+ ;;
+ *-hiux*)
+ vendor=hitachi
+ ;;
+ *-unos*)
+ vendor=crds
+ ;;
+ *-dgux*)
+ vendor=dg
+ ;;
+ *-luna*)
+ vendor=omron
+ ;;
+ *-genix*)
+ vendor=ns
+ ;;
+ *-clix*)
+ vendor=intergraph
+ ;;
+ *-mvs* | *-opened*)
+ vendor=ibm
+ ;;
+ *-os400*)
+ vendor=ibm
+ ;;
+ s390-* | s390x-*)
+ vendor=ibm
+ ;;
+ *-ptx*)
+ vendor=sequent
+ ;;
+ *-tpf*)
+ vendor=ibm
+ ;;
+ *-vxsim* | *-vxworks* | *-windiss*)
+ vendor=wrs
+ ;;
+ *-aux*)
+ vendor=apple
+ ;;
+ *-hms*)
+ vendor=hitachi
+ ;;
+ *-mpw* | *-macos*)
+ vendor=apple
+ ;;
+ *-*mint | *-mint[0-9]* | *-*MiNT | *-MiNT[0-9]*)
+ vendor=atari
+ ;;
+ *-vos*)
+ vendor=stratus
+ ;;
+ esac
+ ;;
+esac
+
+echo "$cpu-$vendor${kernel:+-$kernel}${os:+-$os}${obj:+-$obj}"
+exit
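+
+# Example (illustrative, derived from the default-OS table above): an input
+# such as 'i386-sun' takes the i386-sun branch above (os=sunos4.0.2) and,
+# with no kernel or object-format component, the echo above prints
+# something like:
+#   i386-sun-sunos4.0.2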
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/build-aux/install-sh b/build-aux/install-sh
new file mode 100755
index 0000000..7c56c9c
--- /dev/null
+++ b/build-aux/install-sh
@@ -0,0 +1,541 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2023-11-23.18; # UTC
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# 'make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+tab=' '
+nl='
+'
+IFS=" $tab$nl"
+
+# Set DOITPROG to "echo" to test this script.
+
+doit=${DOITPROG-}
+doit_exec=${doit:-exec}
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+chgrpprog=${CHGRPPROG-chgrp}
+chmodprog=${CHMODPROG-chmod}
+chownprog=${CHOWNPROG-chown}
+cmpprog=${CMPPROG-cmp}
+cpprog=${CPPROG-cp}
+mkdirprog=${MKDIRPROG-mkdir}
+mvprog=${MVPROG-mv}
+rmprog=${RMPROG-rm}
+stripprog=${STRIPPROG-strip}
+
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+# Create dirs (including intermediate dirs) using mode 755.
+# This is like GNU 'install' as of coreutils 8.32 (2020).
+mkdir_umask=22
+
+backupsuffix=
+chgrpcmd=
+chmodcmd=$chmodprog
+chowncmd=
+mvcmd=$mvprog
+rmcmd="$rmprog -f"
+stripcmd=
+
+src=
+dst=
+dir_arg=
+dst_arg=
+
+copy_on_change=false
+is_target_a_directory=possibly
+
+usage="\
+Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+ or: $0 [OPTION]... SRCFILES... DIRECTORY
+ or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+ or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+ --help display this help and exit.
+ --version display version info and exit.
+
+ -c (ignored)
+ -C install only if different (preserve data modification time)
+ -d create directories instead of installing files.
+ -g GROUP $chgrpprog installed files to GROUP.
+ -m MODE $chmodprog installed files to MODE.
+ -o USER $chownprog installed files to USER.
+ -p pass -p to $cpprog.
+ -s $stripprog installed files.
+ -S SUFFIX attempt to back up existing files, with suffix SUFFIX.
+ -t DIRECTORY install into DIRECTORY.
+ -T report an error if DSTFILE is a directory.
+
+Environment variables override the default commands:
+ CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
+ RMPROG STRIPPROG
+
+By default, rm is invoked with -f; when overridden with RMPROG,
+it's up to you to specify -f if you want it.
+
+If -S is not specified, no backups are attempted.
+
+Report bugs to <bug-automake@gnu.org>.
+GNU Automake home page: <https://www.gnu.org/software/automake/>.
+General help using GNU software: <https://www.gnu.org/gethelp/>."
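+
+# Example invocations (illustrative; the file and directory names below are
+# only placeholders, see the usage text above):
+#   ./install-sh -c -m 644 foo.h /usr/local/include   # copy foo.h with mode 644
+#   ./install-sh -d /usr/local/lib/mypkg              # create a directory tree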
+
+while test $# -ne 0; do
+ case $1 in
+ -c) ;;
+
+ -C) copy_on_change=true;;
+
+ -d) dir_arg=true;;
+
+ -g) chgrpcmd="$chgrpprog $2"
+ shift;;
+
+ --help) echo "$usage"; exit $?;;
+
+ -m) mode=$2
+ case $mode in
+ *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*)
+ echo "$0: invalid mode: $mode" >&2
+ exit 1;;
+ esac
+ shift;;
+
+ -o) chowncmd="$chownprog $2"
+ shift;;
+
+ -p) cpprog="$cpprog -p";;
+
+ -s) stripcmd=$stripprog;;
+
+ -S) backupsuffix="$2"
+ shift;;
+
+ -t)
+ is_target_a_directory=always
+ dst_arg=$2
+ # Protect names problematic for 'test' and other utilities.
+ case $dst_arg in
+ -* | [=\(\)!]) dst_arg=./$dst_arg;;
+ esac
+ shift;;
+
+ -T) is_target_a_directory=never;;
+
+ --version) echo "$0 $scriptversion"; exit $?;;
+
+ --) shift
+ break;;
+
+ -*) echo "$0: invalid option: $1" >&2
+ exit 1;;
+
+ *) break;;
+ esac
+ shift
+done
+
+# We allow the use of options -d and -T together, by making -d
+# take precedence; this is for compatibility with GNU install.
+
+if test -n "$dir_arg"; then
+ if test -n "$dst_arg"; then
+ echo "$0: target directory not allowed when installing a directory." >&2
+ exit 1
+ fi
+fi
+
+if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
+ # When -d is used, all remaining arguments are directories to create.
+ # When -t is used, the destination is already specified.
+ # Otherwise, the last argument is the destination. Remove it from $@.
+ for arg
+ do
+ if test -n "$dst_arg"; then
+ # $@ is not empty: it contains at least $arg.
+ set fnord "$@" "$dst_arg"
+ shift # fnord
+ fi
+ shift # arg
+ dst_arg=$arg
+ # Protect names problematic for 'test' and other utilities.
+ case $dst_arg in
+ -* | [=\(\)!]) dst_arg=./$dst_arg;;
+ esac
+ done
+fi
+
+if test $# -eq 0; then
+ if test -z "$dir_arg"; then
+ echo "$0: no input file specified." >&2
+ exit 1
+ fi
+ # It's OK to call 'install-sh -d' without argument.
+ # This can happen when creating conditional directories.
+ exit 0
+fi
+
+if test -z "$dir_arg"; then
+ if test $# -gt 1 || test "$is_target_a_directory" = always; then
+ if test ! -d "$dst_arg"; then
+ echo "$0: $dst_arg: Is not a directory." >&2
+ exit 1
+ fi
+ fi
+fi
+
+if test -z "$dir_arg"; then
+ do_exit='(exit $ret); exit $ret'
+ trap "ret=129; $do_exit" 1
+ trap "ret=130; $do_exit" 2
+ trap "ret=141; $do_exit" 13
+ trap "ret=143; $do_exit" 15
+
+ # Set umask so as not to create temps with too-generous modes.
+ # However, 'strip' requires both read and write access to temps.
+ case $mode in
+ # Optimize common cases.
+ *644) cp_umask=133;;
+ *755) cp_umask=22;;
+
+ *[0-7])
+ if test -z "$stripcmd"; then
+ u_plus_rw=
+ else
+ u_plus_rw='% 200'
+ fi
+ cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+ *)
+ if test -z "$stripcmd"; then
+ u_plus_rw=
+ else
+ u_plus_rw=,u+rw
+ fi
+ cp_umask=$mode$u_plus_rw;;
+ esac
+fi
+
+for src
+do
+ # Protect names problematic for 'test' and other utilities.
+ case $src in
+ -* | [=\(\)!]) src=./$src;;
+ esac
+
+ if test -n "$dir_arg"; then
+ dst=$src
+ dstdir=$dst
+ test -d "$dstdir"
+ dstdir_status=$?
+ # Don't chown directories that already exist.
+ if test $dstdir_status = 0; then
+ chowncmd=""
+ fi
+ else
+
+ # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+ # might cause directories to be created, which would be especially bad
+ # if $src (and thus $dsttmp) contains '*'.
+ if test ! -f "$src" && test ! -d "$src"; then
+ echo "$0: $src does not exist." >&2
+ exit 1
+ fi
+
+ if test -z "$dst_arg"; then
+ echo "$0: no destination specified." >&2
+ exit 1
+ fi
+ dst=$dst_arg
+
+ # If destination is a directory, append the input filename.
+ if test -d "$dst"; then
+ if test "$is_target_a_directory" = never; then
+ echo "$0: $dst_arg: Is a directory" >&2
+ exit 1
+ fi
+ dstdir=$dst
+ dstbase=`basename "$src"`
+ case $dst in
+ */) dst=$dst$dstbase;;
+ *) dst=$dst/$dstbase;;
+ esac
+ dstdir_status=0
+ else
+ dstdir=`dirname "$dst"`
+ test -d "$dstdir"
+ dstdir_status=$?
+ fi
+ fi
+
+ case $dstdir in
+ */) dstdirslash=$dstdir;;
+ *) dstdirslash=$dstdir/;;
+ esac
+
+ obsolete_mkdir_used=false
+
+ if test $dstdir_status != 0; then
+ case $posix_mkdir in
+ '')
+ # With -d, create the new directory with the user-specified mode.
+ # Otherwise, rely on $mkdir_umask.
+ if test -n "$dir_arg"; then
+ mkdir_mode=-m$mode
+ else
+ mkdir_mode=
+ fi
+
+ posix_mkdir=false
+ # The $RANDOM variable is not portable (e.g., dash). Use it
+        # here when available, though, just to lower the collision chance.
+ tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+
+ trap '
+ ret=$?
+ rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null
+ exit $ret
+ ' 0
+
+ # Because "mkdir -p" follows existing symlinks and we likely work
+ # directly in world-writeable /tmp, make sure that the '$tmpdir'
+ # directory is successfully created first before we actually test
+ # 'mkdir -p'.
+ if (umask $mkdir_umask &&
+ $mkdirprog $mkdir_mode "$tmpdir" &&
+ exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1
+ then
+ if test -z "$dir_arg" || {
+ # Check for POSIX incompatibilities with -m.
+ # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+ # other-writable bit of parent directory when it shouldn't.
+ # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+ test_tmpdir="$tmpdir/a"
+ ls_ld_tmpdir=`ls -ld "$test_tmpdir"`
+ case $ls_ld_tmpdir in
+ d????-?r-*) different_mode=700;;
+ d????-?--*) different_mode=755;;
+ *) false;;
+ esac &&
+ $mkdirprog -m$different_mode -p -- "$test_tmpdir" && {
+ ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"`
+ test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+ }
+ }
+ then posix_mkdir=:
+ fi
+ rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir"
+ else
+ # Remove any dirs left behind by ancient mkdir implementations.
+ rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null
+ fi
+ trap '' 0;;
+ esac
+
+ if
+ $posix_mkdir && (
+ umask $mkdir_umask &&
+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+ )
+ then :
+ else
+
+ # mkdir does not conform to POSIX,
+ # or it failed possibly due to a race condition. Create the
+ # directory the slow way, step by step, checking for races as we go.
+
+ case $dstdir in
+ /*) prefix='/';;
+ [-=\(\)!]*) prefix='./';;
+ *) prefix='';;
+ esac
+
+ oIFS=$IFS
+ IFS=/
+ set -f
+ set fnord $dstdir
+ shift
+ set +f
+ IFS=$oIFS
+
+ prefixes=
+
+ for d
+ do
+ test X"$d" = X && continue
+
+ prefix=$prefix$d
+ if test -d "$prefix"; then
+ prefixes=
+ else
+ if $posix_mkdir; then
+ (umask $mkdir_umask &&
+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+ # Don't fail if two instances are running concurrently.
+ test -d "$prefix" || exit 1
+ else
+ case $prefix in
+ *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+ *) qprefix=$prefix;;
+ esac
+ prefixes="$prefixes '$qprefix'"
+ fi
+ fi
+ prefix=$prefix/
+ done
+
+ if test -n "$prefixes"; then
+ # Don't fail if two instances are running concurrently.
+ (umask $mkdir_umask &&
+ eval "\$doit_exec \$mkdirprog $prefixes") ||
+ test -d "$dstdir" || exit 1
+ obsolete_mkdir_used=true
+ fi
+ fi
+ fi
+
+ if test -n "$dir_arg"; then
+ { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+ { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+ else
+
+ # Make a couple of temp file names in the proper directory.
+ dsttmp=${dstdirslash}_inst.$$_
+ rmtmp=${dstdirslash}_rm.$$_
+
+ # Trap to clean up those temp files at exit.
+ trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+ # Copy the file name to the temp name.
+ (umask $cp_umask &&
+ { test -z "$stripcmd" || {
+ # Create $dsttmp read-write so that cp doesn't create it read-only,
+ # which would cause strip to fail.
+ if test -z "$doit"; then
+ : >"$dsttmp" # No need to fork-exec 'touch'.
+ else
+ $doit touch "$dsttmp"
+ fi
+ }
+ } &&
+ $doit_exec $cpprog "$src" "$dsttmp") &&
+
+ # and set any options; do chmod last to preserve setuid bits.
+ #
+ # If any of these fail, we abort the whole thing. If we want to
+ # ignore errors from any of these, just make sure not to ignore
+ # errors from the above "$doit $cpprog $src $dsttmp" command.
+ #
+ { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
+ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
+ { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
+ { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+ # If -C, don't bother to copy if it wouldn't change the file.
+ if $copy_on_change &&
+ old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
+ new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
+ set -f &&
+ set X $old && old=:$2:$4:$5:$6 &&
+ set X $new && new=:$2:$4:$5:$6 &&
+ set +f &&
+ test "$old" = "$new" &&
+ $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
+ then
+ rm -f "$dsttmp"
+ else
+ # If $backupsuffix is set, and the file being installed
+ # already exists, attempt a backup. Don't worry if it fails,
+ # e.g., if mv doesn't support -f.
+ if test -n "$backupsuffix" && test -f "$dst"; then
+ $doit $mvcmd -f "$dst" "$dst$backupsuffix" 2>/dev/null
+ fi
+
+ # Rename the file to the real destination.
+ $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
+
+ # The rename failed, perhaps because mv can't rename something else
+ # to itself, or perhaps because mv is so ancient that it does not
+ # support -f.
+ {
+ # Now remove or move aside any old file at destination location.
+ # We try this two ways since rm can't unlink itself on some
+ # systems and the destination file might be busy for other
+ # reasons. In this case, the final cleanup might fail but the new
+ # file should still install successfully.
+ {
+ test ! -f "$dst" ||
+ $doit $rmcmd "$dst" 2>/dev/null ||
+ { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
+ { $doit $rmcmd "$rmtmp" 2>/dev/null; :; }
+ } ||
+ { echo "$0: cannot unlink or rename $dst" >&2
+ (exit 1); exit 1
+ }
+ } &&
+
+ # Now rename the file to the real destination.
+ $doit $mvcmd "$dsttmp" "$dst"
+ }
+ fi || exit 1
+
+ trap '' 0
+ fi
+done
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC0"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/build-aux/ltmain.sh b/build-aux/ltmain.sh
new file mode 100644
index 0000000..2127422
--- /dev/null
+++ b/build-aux/ltmain.sh
@@ -0,0 +1,11338 @@
+#! /usr/bin/env sh
+## DO NOT EDIT - This file generated from ./build-aux/ltmain.in
+## by inline-source v2019-02-19.15
+
+# libtool (GNU libtool) 2.4.7.83-7b09-dirty
+# Provide generalized library-building support services.
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996-2019, 2021-2024 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions. There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4.7.83-7b09-dirty
+package_revision=2.4.7.83
+
+
+## ------ ##
+## Usage. ##
+## ------ ##
+
+# Run './libtool --help' for help with using this script from the
+# command line.
+
+
+## ------------------------------- ##
+## User overridable command paths. ##
+## ------------------------------- ##
+
+# After configure completes, it has a better idea of some of the
+# shell tools we need than the defaults used by the functions shared
+# with bootstrap, so set those here where they can still be
+# overridden by the user, but otherwise take precedence.
+
+: ${AUTOCONF="autoconf"}
+: ${AUTOMAKE="automake"}
+
+
+## -------------------------- ##
+## Source external libraries. ##
+## -------------------------- ##
+
+# Much of our low-level functionality needs to be sourced from external
+# libraries, which are installed to $pkgauxdir.
+
+# Set a version string for this script.
+scriptversion=2019-02-19.15; # UTC
+
+# General shell script boiler plate, and helper functions.
+# Written by Gary V. Vaughan, 2004
+
+# This is free software. There is NO warranty; not even for
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+#
+# Copyright (C) 2004-2019, 2021, 2023 Bootstrap Authors
+#
+# This file is dual licensed under the terms of the MIT license
+# <https://opensource.org/licenses/MIT>, and GPL version 2 or later
+# <https://www.gnu.org/licenses/gpl-2.0.html>. You must apply one of
+# these licenses when using or redistributing this software or any of
+# the files within it. See the URLs above, or the file `LICENSE`
+# included in the Bootstrap distribution for the full license texts.
+
+# Please report bugs or propose patches to:
+# <https://github.com/gnulib-modules/bootstrap/issues>
+
+
+## ------ ##
+## Usage. ##
+## ------ ##
+
+# Evaluate this file near the top of your script to gain access to
+# the functions and variables defined here:
+#
+# . `echo "$0" | ${SED-sed} 's|[^/]*$||'`/build-aux/funclib.sh
+#
+# If you need to override any of the default environment variable
+# settings, do that before evaluating this file.
+
+
+## -------------------- ##
+## Shell normalisation. ##
+## -------------------- ##
+
+# Some shells need a little help to be as Bourne compatible as possible.
+# Before doing anything else, make sure all that help has been provided!
+
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+ emulate sh
+ NULLCMD=:
+ # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '${1+"$@"}'='"$@"'
+ setopt NO_GLOB_SUBST
+else
+ case `(set -o) 2>/dev/null` in *posix*) set -o posix ;; esac
+fi
+
+# NLS nuisances: We save the old values in case they are required later.
+_G_user_locale=
+_G_safe_locale=
+for _G_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+ eval "if test set = \"\${$_G_var+set}\"; then
+ save_$_G_var=\$$_G_var
+ $_G_var=C
+ export $_G_var
+ _G_user_locale=\"$_G_var=\\\$save_\$_G_var; \$_G_user_locale\"
+ _G_safe_locale=\"$_G_var=C; \$_G_safe_locale\"
+ fi"
+done
+# These NLS vars are set unconditionally (bootstrap issue #24). Unset those
+# in case the environment reset is needed later and the $save_* variant is not
+# defined (see the code above).
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+# Make sure IFS has a sensible default
+sp=' '
+nl='
+'
+IFS="$sp $nl"
+
+# There are apparently some systems that use ';' as a PATH separator!
+if test "${PATH_SEPARATOR+set}" != set; then
+ PATH_SEPARATOR=:
+ (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+ (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+ PATH_SEPARATOR=';'
+ }
+fi
+
+
+# func_unset VAR
+# --------------
+# Portably unset VAR.
+# In some shells, an 'unset VAR' statement leaves a non-zero return
+# status if VAR is already unset, which might be problematic if the
+# statement is used at the end of a function (thus poisoning its return
+# value) or when 'set -e' is active (causing even a spurious abort of
+# the script in this case).
+func_unset ()
+{
+ { eval $1=; (eval unset $1) >/dev/null 2>&1 && eval unset $1 || : ; }
+}
+
+
+# Make sure CDPATH doesn't cause `cd` commands to output the target dir.
+func_unset CDPATH
+
+# Make sure ${,E,F}GREP behave sanely.
+func_unset GREP_OPTIONS
+
+
+## ------------------------- ##
+## Locate command utilities. ##
+## ------------------------- ##
+
+
+# func_executable_p FILE
+# ----------------------
+# Check that FILE is an executable regular file.
+func_executable_p ()
+{
+ test -f "$1" && test -x "$1"
+}
+
+
+# func_path_progs PROGS_LIST CHECK_FUNC [PATH]
+# --------------------------------------------
+# Search for either a program that responds to --version with output
+# containing "GNU", or else the program selected by CHECK_FUNC, by
+# trying all the directories in PATH with each of the elements of
+# PROGS_LIST.
+#
+# CHECK_FUNC should accept the path to a candidate program, and
+# set $func_check_prog_result if it truncates its output less than
+# $_G_path_prog_max characters.
+func_path_progs ()
+{
+ _G_progs_list=$1
+ _G_check_func=$2
+ _G_PATH=${3-"$PATH"}
+
+ _G_path_prog_max=0
+ _G_path_prog_found=false
+ _G_save_IFS=$IFS; IFS=${PATH_SEPARATOR-:}
+ for _G_dir in $_G_PATH; do
+ IFS=$_G_save_IFS
+ test -z "$_G_dir" && _G_dir=.
+ for _G_prog_name in $_G_progs_list; do
+ for _exeext in '' .EXE; do
+ _G_path_prog=$_G_dir/$_G_prog_name$_exeext
+ func_executable_p "$_G_path_prog" || continue
+ case `"$_G_path_prog" --version 2>&1` in
+ *GNU*) func_path_progs_result=$_G_path_prog _G_path_prog_found=: ;;
+ *) $_G_check_func $_G_path_prog
+ func_path_progs_result=$func_check_prog_result
+ ;;
+ esac
+ $_G_path_prog_found && break 3
+ done
+ done
+ done
+ IFS=$_G_save_IFS
+ test -z "$func_path_progs_result" && {
+ echo "no acceptable sed could be found in \$PATH" >&2
+ exit 1
+ }
+}
+
+
+# We want to be able to use the functions in this file before configure
+# has figured out where the best binaries are kept, which means we have
+# to search for them ourselves - except when the results are already set,
+# in which case we skip the searches.
+
+# Unless the user overrides by setting SED, search the path for either GNU
+# sed, or the sed that truncates its output the least.
+test -z "$SED" && {
+ _G_sed_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
+ for _G_i in 1 2 3 4 5 6 7; do
+ _G_sed_script=$_G_sed_script$nl$_G_sed_script
+ done
+ echo "$_G_sed_script" 2>/dev/null | sed 99q >conftest.sed
+ _G_sed_script=
+
+ func_check_prog_sed ()
+ {
+ _G_path_prog=$1
+
+ _G_count=0
+ printf 0123456789 >conftest.in
+ while :
+ do
+ cat conftest.in conftest.in >conftest.tmp
+ mv conftest.tmp conftest.in
+ cp conftest.in conftest.nl
+ echo '' >> conftest.nl
+ "$_G_path_prog" -f conftest.sed <conftest.nl >conftest.out 2>/dev/null || break
+ diff conftest.out conftest.nl >/dev/null 2>&1 || break
+ _G_count=`expr $_G_count + 1`
+ if test "$_G_count" -gt "$_G_path_prog_max"; then
+ # Best one so far, save it but keep looking for a better one
+ func_check_prog_result=$_G_path_prog
+ _G_path_prog_max=$_G_count
+ fi
+ # 10*(2^10) chars as input seems more than enough
+ test 10 -lt "$_G_count" && break
+ done
+ rm -f conftest.in conftest.tmp conftest.nl conftest.out
+ }
+
+ func_path_progs "sed gsed" func_check_prog_sed "$PATH:/usr/xpg4/bin"
+ rm -f conftest.sed
+ SED=$func_path_progs_result
+}
+
+
+# Unless the user overrides by setting GREP, search the path for either GNU
+# grep, or the grep that truncates its output the least.
+test -z "$GREP" && {
+ func_check_prog_grep ()
+ {
+ _G_path_prog=$1
+
+ _G_count=0
+ _G_path_prog_max=0
+ printf 0123456789 >conftest.in
+ while :
+ do
+ cat conftest.in conftest.in >conftest.tmp
+ mv conftest.tmp conftest.in
+ cp conftest.in conftest.nl
+ echo 'GREP' >> conftest.nl
+ "$_G_path_prog" -e 'GREP$' -e '-(cannot match)-' <conftest.nl >conftest.out 2>/dev/null || break
+ diff conftest.out conftest.nl >/dev/null 2>&1 || break
+ _G_count=`expr $_G_count + 1`
+ if test "$_G_count" -gt "$_G_path_prog_max"; then
+ # Best one so far, save it but keep looking for a better one
+ func_check_prog_result=$_G_path_prog
+ _G_path_prog_max=$_G_count
+ fi
+ # 10*(2^10) chars as input seems more than enough
+ test 10 -lt "$_G_count" && break
+ done
+ rm -f conftest.in conftest.tmp conftest.nl conftest.out
+ }
+
+ func_path_progs "grep ggrep" func_check_prog_grep "$PATH:/usr/xpg4/bin"
+ GREP=$func_path_progs_result
+}
+
+
+## ------------------------------- ##
+## User overridable command paths. ##
+## ------------------------------- ##
+
+# All uppercase variable names are used for environment variables. These
+# variables can be overridden by the user before calling a script that
+# uses them if a suitable command of that name is not already available
+# in the command search PATH.
+
+: ${CP="cp -f"}
+: ${ECHO="printf %s\n"}
+: ${EGREP="$GREP -E"}
+: ${FGREP="$GREP -F"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+
+
+## -------------------- ##
+## Useful sed snippets. ##
+## -------------------- ##
+
+sed_dirname='s|/[^/]*$||'
+sed_basename='s|^.*/||'
+
+# Sed substitution that helps us do robust quoting. It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='s|\([`"$\\]\)|\\\1|g'
+
+# Same as above, but do not quote variable references.
+sed_double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s|[].[^$\\*\/]|\\&|g'
+
+# Sed substitution that converts a w32 file name or path
+# that contains forward slashes, into one that contains
+# (escaped) backslashes. A very naive implementation.
+sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-'\' parameter expansions in output of sed_double_quote_subst that
+# were '\'-ed in input to the same. If an odd number of '\' preceded a
+# '$' in input to sed_double_quote_subst, that '$' was protected from
+# expansion. Since each input '\' is now two '\'s, look for any number
+# of runs of four '\'s followed by two '\'s and then a '$'. '\' that '$'.
+_G_bs='\\'
+_G_bs2='\\\\'
+_G_bs4='\\\\\\\\'
+_G_dollar='\$'
+sed_double_backslash="\
+ s/$_G_bs4/&\\
+/g
+ s/^$_G_bs2$_G_dollar/$_G_bs&/
+ s/\\([^$_G_bs]\\)$_G_bs2$_G_dollar/\\1$_G_bs2$_G_bs$_G_dollar/g
+ s/\n//g"
+
+# require_check_ifs_backslash
+# ---------------------------
+# Check if we can use backslash as IFS='\' separator, and set
+# $check_ifs_backshlash_broken to ':' or 'false'.
+require_check_ifs_backslash=func_require_check_ifs_backslash
+func_require_check_ifs_backslash ()
+{
+ _G_save_IFS=$IFS
+ IFS='\'
+ _G_check_ifs_backshlash='a\\b'
+ for _G_i in $_G_check_ifs_backshlash
+ do
+ case $_G_i in
+ a)
+ check_ifs_backshlash_broken=false
+ ;;
+ '')
+ break
+ ;;
+ *)
+ check_ifs_backshlash_broken=:
+ break
+ ;;
+ esac
+ done
+ IFS=$_G_save_IFS
+ require_check_ifs_backslash=:
+}
+
+
+## ----------------- ##
+## Global variables. ##
+## ----------------- ##
+
+# Except for the global variables explicitly listed below, the following
+# functions in the '^func_' namespace, and the '^require_' namespace
+# variables initialised in the 'Resource management' section, sourcing
+# this file will not pollute your global namespace with anything
+# else. There's no portable way to scope variables in Bourne shell
+# though, so actually running these functions will sometimes place
+# results into a variable named after the function, and often use
+# temporary variables in the '^_G_' namespace. If you are careful to
+# avoid using those namespaces casually in your sourcing script, things
+# should continue to work as you expect. And, of course, you can freely
+# overwrite any of the functions or variables defined here before
+# calling anything to customize them.
+
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake.
+
+# Allow overriding, eg assuming that you follow the convention of
+# putting '$debug_cmd' at the start of all your functions, you can get
+# bash to show function call trace with:
+#
+# debug_cmd='eval echo "${FUNCNAME[0]} $*" >&2' bash your-script-name
+debug_cmd=${debug_cmd-":"}
+exit_cmd=:
+
+# By convention, finish your script with:
+#
+# exit $exit_status
+#
+# so that you can set exit_status to non-zero if you want to indicate
+# something went wrong during execution without actually bailing out at
+# the point of failure.
+exit_status=$EXIT_SUCCESS
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath=$0
+
+# The name of this program.
+progname=`$ECHO "$progpath" |$SED "$sed_basename"`
+
+# Make sure we have an absolute progpath for reexecution:
+case $progpath in
+ [\\/]*|[A-Za-z]:\\*) ;;
+ *[\\/]*)
+ progdir=`$ECHO "$progpath" |$SED "$sed_dirname"`
+ progdir=`cd "$progdir" && pwd`
+ progpath=$progdir/$progname
+ ;;
+ *)
+ _G_IFS=$IFS
+ IFS=${PATH_SEPARATOR-:}
+ for progdir in $PATH; do
+ IFS=$_G_IFS
+ test -x "$progdir/$progname" && break
+ done
+ IFS=$_G_IFS
+ test -n "$progdir" || progdir=`pwd`
+ progpath=$progdir/$progname
+ ;;
+esac
+
+
+## ----------------- ##
+## Standard options. ##
+## ----------------- ##
+
+# The following options affect the operation of the functions defined
+# below, and should be set appropriately depending on run-time
+# parameters passed on the command line.
+
+opt_dry_run=false
+opt_quiet=false
+opt_verbose=false
+
+# Categories 'all' and 'none' are always available. Append any others
+# you will pass as the first argument to func_warning from your own
+# code.
+warning_categories=
+
+# By default, display warnings according to 'opt_warning_types'. Set
+# 'warning_func' to ':' to elide all warnings, or func_fatal_error to
+# treat the next displayed warning as a fatal error.
+warning_func=func_warn_and_continue
+
+# Set to 'all' to display all warnings, 'none' to suppress all
+# warnings, or a space delimited list of some subset of
+# 'warning_categories' to display only the listed warnings.
+opt_warning_types=all
+
+
+## -------------------- ##
+## Resource management. ##
+## -------------------- ##
+
+# This section contains definitions for functions that each ensure a
+# particular resource (a file, or a non-empty configuration variable for
+# example) is available, and if appropriate to extract default values
+# from pertinent package files. Call them using their associated
+# 'require_*' variable to ensure that they are executed, at most, once.
+#
+# It's entirely deliberate that calling these functions can set
+# variables that don't obey the namespace limitations obeyed by the rest
+# of this file, in order that they be as useful as possible to
+# callers.
+
+
+# require_term_colors
+# -------------------
+# Allow display of bold text on terminals that support it.
+require_term_colors=func_require_term_colors
+func_require_term_colors ()
+{
+ $debug_cmd
+
+ test -t 1 && {
+ # COLORTERM and USE_ANSI_COLORS environment variables take
+ # precedence, because most terminfo databases neglect to describe
+ # whether color sequences are supported.
+ test -n "${COLORTERM+set}" && : ${USE_ANSI_COLORS="1"}
+
+ if test 1 = "$USE_ANSI_COLORS"; then
+ # Standard ANSI escape sequences
+ tc_reset=''
+ tc_bold=''; tc_standout=''
+ tc_red=''; tc_green=''
+ tc_blue=''; tc_cyan=''
+ else
+ # Otherwise trust the terminfo database after all.
+ test -n "`tput sgr0 2>/dev/null`" && {
+ tc_reset=`tput sgr0`
+ test -n "`tput bold 2>/dev/null`" && tc_bold=`tput bold`
+ tc_standout=$tc_bold
+ test -n "`tput smso 2>/dev/null`" && tc_standout=`tput smso`
+ test -n "`tput setaf 1 2>/dev/null`" && tc_red=`tput setaf 1`
+ test -n "`tput setaf 2 2>/dev/null`" && tc_green=`tput setaf 2`
+ test -n "`tput setaf 4 2>/dev/null`" && tc_blue=`tput setaf 4`
+ test -n "`tput setaf 5 2>/dev/null`" && tc_cyan=`tput setaf 5`
+ }
+ fi
+ }
+
+ require_term_colors=:
+}
+
+
+## ----------------- ##
+## Function library. ##
+## ----------------- ##
+
+# This section contains a variety of useful functions to call in your
+# scripts. Take note of the portable wrappers for features provided by
+# some modern shells, which will fall back to slower equivalents on
+# less featureful shells.
+
+
+# func_append VAR VALUE
+# ---------------------
+# Append VALUE onto the existing contents of VAR.
+
+ # We should try to minimise forks, especially on Windows where they are
+ # unreasonably slow, so skip the feature probes when bash or zsh are
+ # being used:
+ if test set = "${BASH_VERSION+set}${ZSH_VERSION+set}"; then
+ : ${_G_HAVE_ARITH_OP="yes"}
+ : ${_G_HAVE_XSI_OPS="yes"}
+ # The += operator was introduced in bash 3.1
+ case $BASH_VERSION in
+ [12].* | 3.0 | 3.0*) ;;
+ *)
+ : ${_G_HAVE_PLUSEQ_OP="yes"}
+ ;;
+ esac
+ fi
+
+ # _G_HAVE_PLUSEQ_OP
+ # Can be empty, in which case the shell is probed, "yes" if += is
+ # useable or anything else if it does not work.
+ test -z "$_G_HAVE_PLUSEQ_OP" \
+ && (eval 'x=a; x+=" b"; test "a b" = "$x"') 2>/dev/null \
+ && _G_HAVE_PLUSEQ_OP=yes
+
+if test yes = "$_G_HAVE_PLUSEQ_OP"
+then
+ # This is an XSI compatible shell, allowing a faster implementation...
+ eval 'func_append ()
+ {
+ $debug_cmd
+
+ eval "$1+=\$2"
+ }'
+else
+ # ...otherwise fall back to using expr, which is often a shell builtin.
+ func_append ()
+ {
+ $debug_cmd
+
+ eval "$1=\$$1\$2"
+ }
+fi
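+
+# Usage sketch (illustrative): with either implementation above,
+#   greeting=Hello; func_append greeting ", world"
+# leaves $greeting set to 'Hello, world'.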
+
+
+# func_append_quoted VAR VALUE
+# ----------------------------
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+if test yes = "$_G_HAVE_PLUSEQ_OP"; then
+ eval 'func_append_quoted ()
+ {
+ $debug_cmd
+
+ func_quote_arg pretty "$2"
+ eval "$1+=\\ \$func_quote_arg_result"
+ }'
+else
+ func_append_quoted ()
+ {
+ $debug_cmd
+
+ func_quote_arg pretty "$2"
+ eval "$1=\$$1\\ \$func_quote_arg_result"
+ }
+fi
+
+
+# func_append_uniq VAR VALUE
+# --------------------------
+# Append unique VALUE onto the existing contents of VAR, assuming
+# entries are delimited by the first character of VALUE. For example:
+#
+# func_append_uniq options " --another-option option-argument"
+#
+# will only append to $options if " --another-option option-argument "
+# is not already present somewhere in $options (note spaces at
+# each end implied by leading space in second argument).
+func_append_uniq ()
+{
+ $debug_cmd
+
+ eval _G_current_value='`$ECHO $'$1'`'
+ _G_delim=`expr "$2" : '\(.\)'`
+
+ case $_G_delim$_G_current_value$_G_delim in
+ *"$2$_G_delim"*) ;;
+ *) func_append "$@" ;;
+ esac
+}
+
+
+# func_arith TERM...
+# ------------------
+# Set func_arith_result to the result of evaluating TERMs.
+ test -z "$_G_HAVE_ARITH_OP" \
+ && (eval 'test 2 = $(( 1 + 1 ))') 2>/dev/null \
+ && _G_HAVE_ARITH_OP=yes
+
+if test yes = "$_G_HAVE_ARITH_OP"; then
+ eval 'func_arith ()
+ {
+ $debug_cmd
+
+ func_arith_result=$(( $* ))
+ }'
+else
+ func_arith ()
+ {
+ $debug_cmd
+
+ func_arith_result=`expr "$@"`
+ }
+fi
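+
+# Usage sketch (illustrative): 'func_arith 1 + 1' sets $func_arith_result
+# to 2 whether the $(( ... )) or the expr implementation was selected.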
+
+
+# func_basename FILE
+# ------------------
+# Set func_basename_result to FILE with everything up to and including
+# the last / stripped.
+if test yes = "$_G_HAVE_XSI_OPS"; then
+ # If this shell supports suffix pattern removal, then use it to avoid
+  # forking. Hide the definitions in single quotes in case the shell chokes
+ # on unsupported syntax...
+ _b='func_basename_result=${1##*/}'
+ _d='case $1 in
+ */*) func_dirname_result=${1%/*}$2 ;;
+ * ) func_dirname_result=$3 ;;
+ esac'
+
+else
+ # ...otherwise fall back to using sed.
+ _b='func_basename_result=`$ECHO "$1" |$SED "$sed_basename"`'
+ _d='func_dirname_result=`$ECHO "$1" |$SED "$sed_dirname"`
+ if test "X$func_dirname_result" = "X$1"; then
+ func_dirname_result=$3
+ else
+ func_append func_dirname_result "$2"
+ fi'
+fi
+
+eval 'func_basename ()
+{
+ $debug_cmd
+
+ '"$_b"'
+}'
+
+
+# func_dirname FILE APPEND NONDIR_REPLACEMENT
+# -------------------------------------------
+# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+eval 'func_dirname ()
+{
+ $debug_cmd
+
+ '"$_d"'
+}'
+
+
+# func_dirname_and_basename FILE APPEND NONDIR_REPLACEMENT
+# --------------------------------------------------------
+# Perform func_basename and func_dirname in a single function
+# call:
+# dirname: Compute the dirname of FILE. If nonempty,
+# add APPEND to the result, otherwise set result
+# to NONDIR_REPLACEMENT.
+# value returned in "$func_dirname_result"
+# basename: Compute filename of FILE.
+# value returned in "$func_basename_result"
+# For efficiency, we do not delegate to the functions above but instead
+# duplicate the functionality here.
+eval 'func_dirname_and_basename ()
+{
+ $debug_cmd
+
+ '"$_b"'
+ '"$_d"'
+}'
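+
+# Usage sketch (illustrative):
+#   func_dirname_and_basename /tmp/foo/bar.c '' '.'
+# sets func_dirname_result=/tmp/foo and func_basename_result=bar.c.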
+
+
+# func_echo ARG...
+# ----------------
+# Echo program name prefixed message.
+func_echo ()
+{
+ $debug_cmd
+
+ _G_message=$*
+
+ func_echo_IFS=$IFS
+ IFS=$nl
+ for _G_line in $_G_message; do
+ IFS=$func_echo_IFS
+ $ECHO "$progname: $_G_line"
+ done
+ IFS=$func_echo_IFS
+}
+
+
+# func_echo_all ARG...
+# --------------------
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+ $ECHO "$*"
+}
+
+
+# func_echo_infix_1 INFIX ARG...
+# ------------------------------
+# Echo program name, followed by INFIX on the first line, with any
+# additional lines not showing INFIX.
+func_echo_infix_1 ()
+{
+ $debug_cmd
+
+ $require_term_colors
+
+ _G_infix=$1; shift
+ _G_indent=$_G_infix
+ _G_prefix="$progname: $_G_infix: "
+ _G_message=$*
+
+ # Strip color escape sequences before counting printable length
+ for _G_tc in "$tc_reset" "$tc_bold" "$tc_standout" "$tc_red" "$tc_green" "$tc_blue" "$tc_cyan"
+ do
+ test -n "$_G_tc" && {
+ _G_esc_tc=`$ECHO "$_G_tc" | $SED "$sed_make_literal_regex"`
+ _G_indent=`$ECHO "$_G_indent" | $SED "s|$_G_esc_tc||g"`
+ }
+ done
+ _G_indent="$progname: "`echo "$_G_indent" | $SED 's|.| |g'`" " ## exclude from sc_prohibit_nested_quotes
+
+ func_echo_infix_1_IFS=$IFS
+ IFS=$nl
+ for _G_line in $_G_message; do
+ IFS=$func_echo_infix_1_IFS
+ $ECHO "$_G_prefix$tc_bold$_G_line$tc_reset" >&2
+ _G_prefix=$_G_indent
+ done
+ IFS=$func_echo_infix_1_IFS
+}
+
+
+# func_error ARG...
+# -----------------
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+ $debug_cmd
+
+ $require_term_colors
+
+ func_echo_infix_1 " $tc_standout${tc_red}error$tc_reset" "$*" >&2
+}
+
+
+# func_fatal_error ARG...
+# -----------------------
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+ $debug_cmd
+
+ func_error "$*"
+ exit $EXIT_FAILURE
+}
+
+
+# func_grep EXPRESSION FILENAME
+# -----------------------------
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+ $debug_cmd
+
+ $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_len STRING
+# ---------------
+# Set func_len_result to the length of STRING. STRING may not
+# start with a hyphen.
+ test -z "$_G_HAVE_XSI_OPS" \
+ && (eval 'x=a/b/c;
+ test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
+ && _G_HAVE_XSI_OPS=yes
+
+if test yes = "$_G_HAVE_XSI_OPS"; then
+ eval 'func_len ()
+ {
+ $debug_cmd
+
+ func_len_result=${#1}
+ }'
+else
+ func_len ()
+ {
+ $debug_cmd
+
+ func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+ }
+fi
+
+
+# func_mkdir_p DIRECTORY-PATH
+# ---------------------------
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+ $debug_cmd
+
+ _G_directory_path=$1
+ _G_dir_list=
+
+ if test -n "$_G_directory_path" && test : != "$opt_dry_run"; then
+
+ # Protect directory names starting with '-'
+ case $_G_directory_path in
+ -*) _G_directory_path=./$_G_directory_path ;;
+ esac
+
+ # While some portion of DIR does not yet exist...
+ while test ! -d "$_G_directory_path"; do
+ # ...make a list in topmost first order. Use a colon delimited
+      # list in case some portion of path contains whitespace.
+ _G_dir_list=$_G_directory_path:$_G_dir_list
+
+ # If the last portion added has no slash in it, the list is done
+ case $_G_directory_path in */*) ;; *) break ;; esac
+
+ # ...otherwise throw away the child directory and loop
+ _G_directory_path=`$ECHO "$_G_directory_path" | $SED -e "$sed_dirname"`
+ done
+ _G_dir_list=`$ECHO "$_G_dir_list" | $SED 's|:*$||'`
+
+ func_mkdir_p_IFS=$IFS; IFS=:
+ for _G_dir in $_G_dir_list; do
+ IFS=$func_mkdir_p_IFS
+      # mkdir can fail with a 'File exists' error if two processes
+ # try to create one of the directories concurrently. Don't
+ # stop in that case!
+ $MKDIR "$_G_dir" 2>/dev/null || :
+ done
+ IFS=$func_mkdir_p_IFS
+
+ # Bail out if we (or some other process) failed to create a directory.
+ test -d "$_G_directory_path" || \
+ func_fatal_error "Failed to create '$1'"
+ fi
+}
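+
+# Usage sketch (illustrative): 'func_mkdir_p foo/bar/baz' creates foo,
+# foo/bar and foo/bar/baz as needed, and does nothing in dry-run mode.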
+
+
+# func_mktempdir [BASENAME]
+# -------------------------
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible. If
+# given, BASENAME is the basename for that directory.
+func_mktempdir ()
+{
+ $debug_cmd
+
+ _G_template=${TMPDIR-/tmp}/${1-$progname}
+
+ if test : = "$opt_dry_run"; then
+ # Return a directory name, but don't create it in dry-run mode
+ _G_tmpdir=$_G_template-$$
+ else
+
+ # If mktemp works, use that first and foremost
+ _G_tmpdir=`mktemp -d "$_G_template-XXXXXXXX" 2>/dev/null`
+
+ if test ! -d "$_G_tmpdir"; then
+ # Failing that, at least try and use $RANDOM to avoid a race
+ _G_tmpdir=$_G_template-${RANDOM-0}$$
+
+ func_mktempdir_umask=`umask`
+ umask 0077
+ $MKDIR "$_G_tmpdir"
+ umask $func_mktempdir_umask
+ fi
+
+ # If we're not in dry-run mode, bomb out on failure
+ test -d "$_G_tmpdir" || \
+ func_fatal_error "cannot create temporary directory '$_G_tmpdir'"
+ fi
+
+ $ECHO "$_G_tmpdir"
+}
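+
+# Usage sketch (illustrative): '_dir=`func_mktempdir`' prints (and, outside
+# dry-run mode, creates) a private directory under ${TMPDIR-/tmp}, named
+# after $progname, preferring mktemp and falling back to a $RANDOM-based name.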
+
+
+# func_normal_abspath PATH
+# ------------------------
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+func_normal_abspath ()
+{
+ $debug_cmd
+
+ # These SED scripts presuppose an absolute path with a trailing slash.
+ _G_pathcar='s|^/\([^/]*\).*$|\1|'
+ _G_pathcdr='s|^/[^/]*||'
+ _G_removedotparts=':dotsl
+ s|/\./|/|g
+ t dotsl
+ s|/\.$|/|'
+ _G_collapseslashes='s|/\{1,\}|/|g'
+ _G_finalslash='s|/*$|/|'
+
+ # Start from root dir and reassemble the path.
+ func_normal_abspath_result=
+ func_normal_abspath_tpath=$1
+ func_normal_abspath_altnamespace=
+ case $func_normal_abspath_tpath in
+ "")
+ # Empty path, that just means $cwd.
+ func_stripname '' '/' "`pwd`"
+ func_normal_abspath_result=$func_stripname_result
+ return
+ ;;
+ # The next three entries are used to spot a run of precisely
+ # two leading slashes without using negated character classes;
+ # we take advantage of case's first-match behaviour.
+ ///*)
+ # Unusual form of absolute path, do nothing.
+ ;;
+ //*)
+ # Not necessarily an ordinary path; POSIX reserves leading '//'
+ # and for example Cygwin uses it to access remote file shares
+ # over CIFS/SMB, so we conserve a leading double slash if found.
+ func_normal_abspath_altnamespace=/
+ ;;
+ /*)
+ # Absolute path, do nothing.
+ ;;
+ *)
+ # Relative path, prepend $cwd.
+ func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+ ;;
+ esac
+
+ # Cancel out all the simple stuff to save iterations. We also want
+ # the path to end with a slash for ease of parsing, so make sure
+ # there is one (and only one) here.
+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+ -e "$_G_removedotparts" -e "$_G_collapseslashes" -e "$_G_finalslash"`
+ while :; do
+ # Processed it all yet?
+ if test / = "$func_normal_abspath_tpath"; then
+ # If we ascended to the root using ".." the result may be empty now.
+ if test -z "$func_normal_abspath_result"; then
+ func_normal_abspath_result=/
+ fi
+ break
+ fi
+ func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+ -e "$_G_pathcar"`
+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+ -e "$_G_pathcdr"`
+ # Figure out what to do with it
+ case $func_normal_abspath_tcomponent in
+ "")
+ # Trailing empty path component, ignore it.
+ ;;
+ ..)
+ # Parent dir; strip last assembled component from result.
+ func_dirname "$func_normal_abspath_result"
+ func_normal_abspath_result=$func_dirname_result
+ ;;
+ *)
+ # Actual path component, append it.
+ func_append func_normal_abspath_result "/$func_normal_abspath_tcomponent"
+ ;;
+ esac
+ done
+ # Restore leading double-slash if one was found on entry.
+ func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
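+
+# Usage sketch (illustrative):
+#   func_normal_abspath /tmp/./a//b/../c
+# sets func_normal_abspath_result=/tmp/a/c.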
+
+
+# func_notquiet ARG...
+# --------------------
+# Echo program name prefixed message only when not in quiet mode.
+func_notquiet ()
+{
+ $debug_cmd
+
+ $opt_quiet || func_echo ${1+"$@"}
+
+ # A bug in bash halts the script if the last line of a function
+ # fails when set -e is in force, so we need another command to
+ # work around that:
+ :
+}
+
+
+# func_relative_path SRCDIR DSTDIR
+# --------------------------------
+# Set func_relative_path_result to the relative path from SRCDIR to DSTDIR.
+func_relative_path ()
+{
+ $debug_cmd
+
+ func_relative_path_result=
+ func_normal_abspath "$1"
+ func_relative_path_tlibdir=$func_normal_abspath_result
+ func_normal_abspath "$2"
+ func_relative_path_tbindir=$func_normal_abspath_result
+
+ # Ascend the tree starting from libdir
+ while :; do
+ # check if we have found a prefix of bindir
+ case $func_relative_path_tbindir in
+ $func_relative_path_tlibdir)
+ # found an exact match
+ func_relative_path_tcancelled=
+ break
+ ;;
+ $func_relative_path_tlibdir*)
+ # found a matching prefix
+ func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+ func_relative_path_tcancelled=$func_stripname_result
+ if test -z "$func_relative_path_result"; then
+ func_relative_path_result=.
+ fi
+ break
+ ;;
+ *)
+ func_dirname $func_relative_path_tlibdir
+ func_relative_path_tlibdir=$func_dirname_result
+ if test -z "$func_relative_path_tlibdir"; then
+ # Have to descend all the way to the root!
+ func_relative_path_result=../$func_relative_path_result
+ func_relative_path_tcancelled=$func_relative_path_tbindir
+ break
+ fi
+ func_relative_path_result=../$func_relative_path_result
+ ;;
+ esac
+ done
+
+ # Now calculate path; take care to avoid doubling-up slashes.
+ func_stripname '' '/' "$func_relative_path_result"
+ func_relative_path_result=$func_stripname_result
+ func_stripname '/' '/' "$func_relative_path_tcancelled"
+ if test -n "$func_stripname_result"; then
+ func_append func_relative_path_result "/$func_stripname_result"
+ fi
+
+ # Normalisation. If bindir is libdir, return '.' else relative path.
+ if test -n "$func_relative_path_result"; then
+ func_stripname './' '' "$func_relative_path_result"
+ func_relative_path_result=$func_stripname_result
+ fi
+
+ test -n "$func_relative_path_result" || func_relative_path_result=.
+
+ :
+}
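+
+# Usage sketch (illustrative):
+#   func_relative_path /usr/lib /usr/bin
+# sets func_relative_path_result=../bin.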
+
+
+# func_quote_portable EVAL ARG
+# ----------------------------
+# Internal function to portably implement func_quote_arg. Note that we still
+# pay attention to performance here, so we try as much as possible to avoid
+# calling the sed binary (so far O(N) complexity as long as func_append is O(1)).
+func_quote_portable ()
+{
+ $debug_cmd
+
+ $require_check_ifs_backslash
+
+ func_quote_portable_result=$2
+
+ # one-time-loop (easy break)
+ while true
+ do
+ if $1; then
+ func_quote_portable_result=`$ECHO "$2" | $SED \
+ -e "$sed_double_quote_subst" -e "$sed_double_backslash"`
+ break
+ fi
+
+ # Quote for eval.
+ case $func_quote_portable_result in
+ *[\\\`\"\$]*)
+      # Fall back to sed when $check_ifs_backshlash_broken is ':', or when
+      # the string contains the shell wildcard characters.
+ case $check_ifs_backshlash_broken$func_quote_portable_result in
+ :*|*[\[\*\?]*)
+ func_quote_portable_result=`$ECHO "$func_quote_portable_result" \
+ | $SED "$sed_quote_subst"`
+ break
+ ;;
+ esac
+
+ func_quote_portable_old_IFS=$IFS
+ for _G_char in '\' '`' '"' '$'
+ do
+ # STATE($1) PREV($2) SEPARATOR($3)
+ set start "" ""
+ func_quote_portable_result=dummy"$_G_char$func_quote_portable_result$_G_char"dummy
+ IFS=$_G_char
+ for _G_part in $func_quote_portable_result
+ do
+ case $1 in
+ quote)
+ func_append func_quote_portable_result "$3$2"
+ set quote "$_G_part" "\\$_G_char"
+ ;;
+ start)
+ set first "" ""
+ func_quote_portable_result=
+ ;;
+ first)
+ set quote "$_G_part" ""
+ ;;
+ esac
+ done
+ done
+ IFS=$func_quote_portable_old_IFS
+ ;;
+ *) ;;
+ esac
+ break
+ done
+
+ func_quote_portable_unquoted_result=$func_quote_portable_result
+ case $func_quote_portable_result in
+ # double-quote args containing shell metacharacters to delay
+ # word splitting, command substitution and variable expansion
+ # for a subsequent eval.
+ # many bourne shells cannot handle close brackets correctly
+ # in scan sets, so we specify it separately.
+ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
+ func_quote_portable_result=\"$func_quote_portable_result\"
+ ;;
+ esac
+}
+
+
+# func_quotefast_eval ARG
+# -----------------------
+# Quote one ARG (internal). This is equivalent to 'func_quote_arg eval ARG',
+# but optimized for speed. Result is stored in $func_quotefast_eval.
+if test xyes = `(x=; printf -v x %q yes; echo x"$x") 2>/dev/null`; then
+ printf -v _GL_test_printf_tilde %q '~'
+ if test '\~' = "$_GL_test_printf_tilde"; then
+ func_quotefast_eval ()
+ {
+ printf -v func_quotefast_eval_result %q "$1"
+ }
+ else
+ # Broken older Bash implementations. Make those faster too if possible.
+ func_quotefast_eval ()
+ {
+ case $1 in
+ '~'*)
+ func_quote_portable false "$1"
+ func_quotefast_eval_result=$func_quote_portable_result
+ ;;
+ *)
+ printf -v func_quotefast_eval_result %q "$1"
+ ;;
+ esac
+ }
+ fi
+else
+ func_quotefast_eval ()
+ {
+ func_quote_portable false "$1"
+ func_quotefast_eval_result=$func_quote_portable_result
+ }
+fi
+
+
+# func_quote_arg MODEs ARG
+# ------------------------
+# Quote one ARG to be evaled later. MODEs argument may contain zero or more
+# specifiers listed below separated by ',' character. This function returns two
+# values:
+# i) func_quote_arg_result
+# double-quoted (when needed), suitable for a subsequent eval
+# ii) func_quote_arg_unquoted_result
+# has all characters that are still active within double
+# quotes backslashified. Available only if 'unquoted' is specified.
+#
+# Available modes:
+# ----------------
+# 'eval' (default)
+# - escape shell special characters
+# 'expand'
+# - the same as 'eval'; but do not quote variable references
+# 'pretty'
+# - request aesthetic output, i.e. '"a b"' instead of 'a\ b'. This might
+# be used later in func_quote to get output like: 'echo "a b"' instead
+# of 'echo a\ b'. This is slower than default on some shells.
+# 'unquoted'
+# - produce also $func_quote_arg_unquoted_result which does not contain
+# wrapping double-quotes.
+#
+# Examples for 'func_quote_arg pretty,unquoted string':
+#
+# string | *_result | *_unquoted_result
+# ------------+-----------------------+-------------------
+# " | \" | \"
+# a b | "a b" | a b
+# "a b" | "\"a b\"" | \"a b\"
+# * | "*" | *
+# z="${x-$y}" | "z=\"\${x-\$y}\"" | z=\"\${x-\$y}\"
+#
+# Examples for 'func_quote_arg pretty,unquoted,expand string':
+#
+# string | *_result | *_unquoted_result
+# --------------+---------------------+--------------------
+# z="${x-$y}" | "z=\"${x-$y}\"" | z=\"${x-$y}\"
+func_quote_arg ()
+{
+ _G_quote_expand=false
+ case ,$1, in
+ *,expand,*)
+ _G_quote_expand=:
+ ;;
+ esac
+
+ case ,$1, in
+ *,pretty,*|*,expand,*|*,unquoted,*)
+ func_quote_portable $_G_quote_expand "$2"
+ func_quote_arg_result=$func_quote_portable_result
+ func_quote_arg_unquoted_result=$func_quote_portable_unquoted_result
+ ;;
+ *)
+ # Faster quote-for-eval for some shells.
+ func_quotefast_eval "$2"
+ func_quote_arg_result=$func_quotefast_eval_result
+ ;;
+ esac
+}
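+
+# Usage sketch (illustrative), mirroring how func_show_eval below quotes a
+# command before echoing it:
+#   func_quote_arg pretty,expand "$_G_cmd"
+#   eval "func_notquiet $func_quote_arg_result"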
+
+
+# func_quote MODEs ARGs...
+# ------------------------
+# Quote all ARGs to be evaled later and join them into single command. See
+# func_quote_arg's description for more info.
+func_quote ()
+{
+ $debug_cmd
+ _G_func_quote_mode=$1 ; shift
+ func_quote_result=
+ while test 0 -lt $#; do
+ func_quote_arg "$_G_func_quote_mode" "$1"
+ if test -n "$func_quote_result"; then
+ func_append func_quote_result " $func_quote_arg_result"
+ else
+ func_append func_quote_result "$func_quote_arg_result"
+ fi
+ shift
+ done
+}
+
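+# An illustrative sketch (not part of the upstream script): quote the
+# current positional parameters so they can be restored later with eval,
+# the same pattern this file itself uses further below.  'saved_args' is
+# a hypothetical variable name used only in this sketch.
+#
+#    func_quote eval ${1+"$@"}
+#    saved_args=$func_quote_result
+#    ...
+#    eval set dummy "$saved_args"; shift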
+
+# func_stripname PREFIX SUFFIX NAME
+# ---------------------------------
+# Strip PREFIX and SUFFIX from NAME, and store the result in
+# func_stripname_result.  PREFIX and SUFFIX must not contain globbing or
+# regex special characters, hashes, or percent signs, but SUFFIX may
+# contain a leading dot (in which case that matches only a dot).
+if test yes = "$_G_HAVE_XSI_OPS"; then
+ eval 'func_stripname ()
+ {
+ $debug_cmd
+
+ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+ # positional parameters, so assign one to ordinary variable first.
+ func_stripname_result=$3
+ func_stripname_result=${func_stripname_result#"$1"}
+ func_stripname_result=${func_stripname_result%"$2"}
+ }'
+else
+ func_stripname ()
+ {
+ $debug_cmd
+
+ case $2 in
+ .*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%\\\\$2\$%%"`;;
+ *) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%$2\$%%"`;;
+ esac
+ }
+fi
+
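+# An illustrative sketch (not part of the upstream script); the file name
+# 'libfoo.la' is hypothetical and only shows the expected result.
+#
+#    func_stripname 'lib' '.la' 'libfoo.la'
+#    # func_stripname_result now holds 'foo'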
+
+# func_show_eval CMD [FAIL_EXP]
+# -----------------------------
+# Unless opt_quiet is true, output CMD.  Then, unless opt_dry_run is
+# true, evaluate CMD.  If the evaluation of CMD fails and FAIL_EXP is
+# given, evaluate FAIL_EXP.
+func_show_eval ()
+{
+ $debug_cmd
+
+ _G_cmd=$1
+ _G_fail_exp=${2-':'}
+
+ func_quote_arg pretty,expand "$_G_cmd"
+ eval "func_notquiet $func_quote_arg_result"
+
+ $opt_dry_run || {
+ eval "$_G_cmd"
+ _G_status=$?
+ if test 0 -ne "$_G_status"; then
+ eval "(exit $_G_status); $_G_fail_exp"
+ fi
+ }
+}
+
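+# An illustrative sketch (not part of the upstream script); the command
+# and fail expression below are hypothetical.
+#
+#    func_show_eval 'rm -f conftest.o' 'exit 1'
+#    # Displays the command, runs it unless $opt_dry_run is true, and
+#    # evaluates 'exit 1' if the command fails.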
+
+# func_show_eval_locale CMD [FAIL_EXP]
+# ------------------------------------
+# Unless opt_quiet is true, output CMD.  Then, unless opt_dry_run is
+# true, evaluate CMD in the saved user locale, restoring the safe locale
+# afterwards.  If the evaluation of CMD fails and FAIL_EXP is given,
+# evaluate FAIL_EXP.
+func_show_eval_locale ()
+{
+ $debug_cmd
+
+ _G_cmd=$1
+ _G_fail_exp=${2-':'}
+
+ $opt_quiet || {
+ func_quote_arg expand,pretty "$_G_cmd"
+ eval "func_echo $func_quote_arg_result"
+ }
+
+ $opt_dry_run || {
+ eval "$_G_user_locale
+ $_G_cmd"
+ _G_status=$?
+ eval "$_G_safe_locale"
+ if test 0 -ne "$_G_status"; then
+ eval "(exit $_G_status); $_G_fail_exp"
+ fi
+ }
+}
+
+
+# func_tr_sh
+# ----------
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result. All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+ $debug_cmd
+
+ case $1 in
+ [0-9]* | *[!a-zA-Z0-9_]*)
+ func_tr_sh_result=`$ECHO "$1" | $SED -e 's/^\([0-9]\)/_\1/' -e 's/[^a-zA-Z0-9_]/_/g'`
+ ;;
+ * )
+ func_tr_sh_result=$1
+ ;;
+ esac
+}
+
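+# An illustrative sketch (not part of the upstream script); the input
+# string is hypothetical.
+#
+#    func_tr_sh '1foo-bar.baz'
+#    # func_tr_sh_result now holds '_1foo_bar_baz'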
+
+# func_verbose ARG...
+# -------------------
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+ $debug_cmd
+
+ $opt_verbose && func_echo "$*"
+
+ :
+}
+
+
+# func_warn_and_continue ARG...
+# -----------------------------
+# Echo program name prefixed warning message to standard error.
+func_warn_and_continue ()
+{
+ $debug_cmd
+
+ $require_term_colors
+
+ func_echo_infix_1 "${tc_red}warning$tc_reset" "$*" >&2
+}
+
+
+# func_warning CATEGORY ARG...
+# ----------------------------
+# Echo program name prefixed warning message to standard error. Warning
+# messages can be filtered according to CATEGORY, where this function
+# elides messages where CATEGORY is not listed in the global variable
+# 'opt_warning_types'.
+func_warning ()
+{
+ $debug_cmd
+
+ # CATEGORY must be in the warning_categories list!
+ case " $warning_categories " in
+ *" $1 "*) ;;
+ *) func_internal_error "invalid warning category '$1'" ;;
+ esac
+
+ _G_category=$1
+ shift
+
+ case " $opt_warning_types " in
+ *" $_G_category "*) $warning_func ${1+"$@"} ;;
+ esac
+}
+
+
+# func_sort_ver VER1 VER2
+# -----------------------
+# 'sort -V' is not generally available.
+# Note this deviates from the version comparison in automake
+# in that it treats 1.5 < 1.5.0, and treats 1.4.4a < 1.4-p3a
+# but this should suffice as we won't be specifying old
+# version formats or redundant trailing .0 in bootstrap.conf.
+# If we did want full compatibility then we should probably
+# use m4_version_compare from autoconf.
+func_sort_ver ()
+{
+ $debug_cmd
+
+ printf '%s\n%s\n' "$1" "$2" \
+ | sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n -k 5,5n -k 6,6n -k 7,7n -k 8,8n -k 9,9n
+}
+
+# func_lt_ver PREV CURR
+# ---------------------
+# Return true if PREV and CURR are in the correct order according to
+# func_sort_ver, otherwise false. Use it like this:
+#
+# func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..."
+func_lt_ver ()
+{
+ $debug_cmd
+
+ test "x$1" = x`func_sort_ver "$1" "$2" | $SED 1q`
+}
+
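+# An illustrative sketch (not part of the upstream script); the version
+# strings are hypothetical.
+#
+#    func_lt_ver 2.4.6 2.4.7 && echo "in order"        # succeeds
+#    func_lt_ver 2.4.7 2.4.6 || echo "out of order"    # fails, echoes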
+
+# Local variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC"
+# time-stamp-time-zone: "UTC"
+# End:
+#! /bin/sh
+
+# A portable, pluggable option parser for Bourne shell.
+# Written by Gary V. Vaughan, 2010
+
+# This is free software. There is NO warranty; not even for
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+#
+# Copyright (C) 2010-2019, 2021, 2023 Bootstrap Authors
+#
+# This file is dual licensed under the terms of the MIT license
+# <https://opensource.org/licenses/MIT>, and GPL version 2 or later
+# <https://www.gnu.org/licenses/gpl-2.0.html>. You must apply one of
+# these licenses when using or redistributing this software or any of
+# the files within it. See the URLs above, or the file `LICENSE`
+# included in the Bootstrap distribution for the full license texts.
+
+# Please report bugs or propose patches to:
+# <https://github.com/gnulib-modules/bootstrap/issues>
+
+# Set a version string for this script.
+scriptversion=2019-02-19.15; # UTC
+
+
+## ------ ##
+## Usage. ##
+## ------ ##
+
+# This file is a library for parsing options in your shell scripts along
+# with assorted other useful supporting features that you can make use
+# of too.
+#
+# For the simplest scripts you might need only:
+#
+# #!/bin/sh
+# . relative/path/to/funclib.sh
+# . relative/path/to/options-parser
+# scriptversion=1.0
+# func_options ${1+"$@"}
+# eval set dummy "$func_options_result"; shift
+# ...rest of your script...
+#
+# In order for the '--version' option to work, you will need to have a
+# suitably formatted comment like the one at the top of this file
+# starting with '# Written by ' and ending with '# Copyright'.
+#
+# For '-h' and '--help' to work, you will also need a one line
+# description of your script's purpose in a comment directly above the
+# '# Written by ' line, like the one at the top of this file.
+#
+# The default options also support '--debug', which will turn on shell
+# execution tracing (see the comment above debug_cmd below for another
+# use), and '--verbose' and the func_verbose function to allow your script
+# to display verbose messages only when your user has specified
+# '--verbose'.
+#
+# After sourcing this file, you can plug in processing for additional
+# options by amending the variables from the 'Configuration' section
+# below, and following the instructions in the 'Option parsing'
+# section further down.
+
+## -------------- ##
+## Configuration. ##
+## -------------- ##
+
+# You should override these variables in your script after sourcing this
+# file so that they reflect the customisations you have added to the
+# option parser.
+
+# The usage line for option parsing errors and the start of '-h' and
+# '--help' output messages. You can embed shell variables for delayed
+# expansion at the time the message is displayed, but you will need to
+# quote other shell meta-characters carefully to prevent them being
+# expanded when the contents are evaled.
+usage='$progpath [OPTION]...'
+
+# Short help message in response to '-h' and '--help'. Add to this or
+# override it after sourcing this library to reflect the full set of
+# options your script accepts.
+usage_message="\
+ --debug enable verbose shell tracing
+ -W, --warnings=CATEGORY
+ report the warnings falling in CATEGORY [all]
+ -v, --verbose verbosely report processing
+ --version print version information and exit
+ -h, --help print short or long help message and exit
+"
+
+# Additional text appended to 'usage_message' in response to '--help'.
+long_help_message="
+Warning categories include:
+ 'all' show all warnings
+ 'none' turn off all the warnings
+ 'error' warnings are treated as fatal errors"
+
+# Help message printed before fatal option parsing errors.
+fatal_help="Try '\$progname --help' for more information."
+
+
+
+## ------------------------- ##
+## Hook function management. ##
+## ------------------------- ##
+
+# This section contains functions for adding, removing, and running hooks
+# in the main code. A hook is just a list of function names that can be
+# run in order later on.
+
+# func_hookable FUNC_NAME
+# -----------------------
+# Declare that FUNC_NAME will run hooks added with
+# 'func_add_hook FUNC_NAME ...'.
+func_hookable ()
+{
+ $debug_cmd
+
+ func_append hookable_fns " $1"
+}
+
+
+# func_add_hook FUNC_NAME HOOK_FUNC
+# ---------------------------------
+# Request that FUNC_NAME call HOOK_FUNC before it returns. FUNC_NAME must
+# first have been declared "hookable" by a call to 'func_hookable'.
+func_add_hook ()
+{
+ $debug_cmd
+
+ case " $hookable_fns " in
+ *" $1 "*) ;;
+ *) func_fatal_error "'$1' does not accept hook functions." ;;
+ esac
+
+ eval func_append ${1}_hooks '" $2"'
+}
+
+
+# func_remove_hook FUNC_NAME HOOK_FUNC
+# ------------------------------------
+# Remove HOOK_FUNC from the list of hook functions to be called by
+# FUNC_NAME.
+func_remove_hook ()
+{
+ $debug_cmd
+
+ eval ${1}_hooks='`$ECHO "\$'$1'_hooks" |$SED "s| '$2'||"`'
+}
+
+
+# func_propagate_result FUNC_NAME_A FUNC_NAME_B
+# ---------------------------------------------
+# If the *_result variable of FUNC_NAME_A _is set_, assign its value to
+# *_result variable of FUNC_NAME_B.
+func_propagate_result ()
+{
+ $debug_cmd
+
+ func_propagate_result_result=:
+ if eval "test \"\${${1}_result+set}\" = set"
+ then
+ eval "${2}_result=\$${1}_result"
+ else
+ func_propagate_result_result=false
+ fi
+}
+
+
+# func_run_hooks FUNC_NAME [ARG]...
+# ---------------------------------
+# Run all hook functions registered to FUNC_NAME.
+# It's assumed that the list of hook functions contains nothing more
+# than a whitespace-delimited list of legal shell function names, and
+# no effort is wasted trying to catch shell meta-characters or preserve
+# whitespace.
+func_run_hooks ()
+{
+ $debug_cmd
+
+ case " $hookable_fns " in
+ *" $1 "*) ;;
+ *) func_fatal_error "'$1' does not support hook functions." ;;
+ esac
+
+ eval _G_hook_fns=\$$1_hooks; shift
+
+ for _G_hook in $_G_hook_fns; do
+ func_unset "${_G_hook}_result"
+ eval $_G_hook '${1+"$@"}'
+ func_propagate_result $_G_hook func_run_hooks
+ if $func_propagate_result_result; then
+ eval set dummy "$func_run_hooks_result"; shift
+ fi
+ done
+}
+
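+# An illustrative sketch (not part of the upstream script) tying the
+# three functions above together; 'my_entry_point' and 'my_hook' are
+# hypothetical function names used only here.
+#
+#    func_hookable my_entry_point
+#    my_entry_point ()
+#    {
+#      $debug_cmd
+#      func_run_hooks my_entry_point ${1+"$@"}
+#    }
+#
+#    my_hook ()
+#    {
+#      $debug_cmd
+#      func_echo "my_hook saw $# argument(s)"
+#    }
+#    func_add_hook my_entry_point my_hook
+#
+#    my_entry_point one two    # runs my_hook with 'one' 'two'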
+
+
+## --------------- ##
+## Option parsing. ##
+## --------------- ##
+
+# In order to add your own option parsing hooks, you must accept the
+# full positional parameter list from your hook function. You may remove
+# or edit any options that you action, and then pass back the remaining
+# unprocessed options in '<hooked_function_name>_result', escaped
+# suitably for 'eval'.
+#
+# The '<hooked_function_name>_result' variable is automatically unset
+# before your hook gets called; for best performance, only set the
+# *_result variable when necessary (i.e. don't call the 'func_quote'
+# function unnecessarily because it can be an expensive operation on some
+# machines).
+#
+# Like this:
+#
+# my_options_prep ()
+# {
+# $debug_cmd
+#
+# # Extend the existing usage message.
+# usage_message=$usage_message'
+# -s, --silent don'\''t print informational messages
+# '
+# # No change in '$@' (ignored completely by this hook). Leave
+# # my_options_prep_result variable intact.
+# }
+# func_add_hook func_options_prep my_options_prep
+#
+#
+# my_silent_option ()
+# {
+# $debug_cmd
+#
+# args_changed=false
+#
+# # Note that, for efficiency, we parse as many options as we can
+# # recognise in a loop before passing the remainder back to the
+# # caller on the first unrecognised argument we encounter.
+# while test $# -gt 0; do
+#          _G_opt=$1; shift
+#          case $_G_opt in
+# --silent|-s) opt_silent=:
+# args_changed=:
+# ;;
+# # Separate non-argument short options:
+# -s*) func_split_short_opt "$_G_opt"
+# set dummy "$func_split_short_opt_name" \
+# "-$func_split_short_opt_arg" ${1+"$@"}
+# shift
+# args_changed=:
+# ;;
+# *) # Make sure the first unrecognised option "$_G_opt"
+# # is added back to "$@" in case we need it later,
+# # if $args_changed was set to 'true'.
+# set dummy "$_G_opt" ${1+"$@"}; shift; break ;;
+# esac
+# done
+#
+# # Only call 'func_quote' here if we processed at least one argument.
+# if $args_changed; then
+# func_quote eval ${1+"$@"}
+# my_silent_option_result=$func_quote_result
+# fi
+# }
+# func_add_hook func_parse_options my_silent_option
+#
+#
+# my_option_validation ()
+# {
+# $debug_cmd
+#
+# $opt_silent && $opt_verbose && func_fatal_help "\
+# '--silent' and '--verbose' options are mutually exclusive."
+# }
+# func_add_hook func_validate_options my_option_validation
+#
+# You'll also need to manually amend $usage_message to reflect the extra
+# options you parse. It's preferable to append if you can, so that
+# multiple option parsing hooks can be added safely.
+
+
+# func_options_finish [ARG]...
+# ----------------------------
+# Finish the option parse loop (at the moment, this simply runs the
+# 'func_options' hooks).
+func_options_finish ()
+{
+ $debug_cmd
+
+ func_run_hooks func_options ${1+"$@"}
+ func_propagate_result func_run_hooks func_options_finish
+}
+
+
+# func_options [ARG]...
+# ---------------------
+# All the functions called inside func_options are hookable. See the
+# individual implementations for details.
+func_hookable func_options
+func_options ()
+{
+ $debug_cmd
+
+ _G_options_quoted=false
+
+ for my_func in options_prep parse_options validate_options options_finish
+ do
+ func_unset func_${my_func}_result
+ func_unset func_run_hooks_result
+ eval func_$my_func '${1+"$@"}'
+ func_propagate_result func_$my_func func_options
+ if $func_propagate_result_result; then
+ eval set dummy "$func_options_result"; shift
+ _G_options_quoted=:
+ fi
+ done
+
+ $_G_options_quoted || {
+ # As we (func_options) are top-level options-parser function and
+ # nobody quoted "$@" for us yet, we need to do it explicitly for
+ # caller.
+ func_quote eval ${1+"$@"}
+ func_options_result=$func_quote_result
+ }
+}
+
+
+# func_options_prep [ARG]...
+# --------------------------
+# All initialisations required before starting the option parse loop.
+# Note that when calling hook functions, we pass through the list of
+# positional parameters. If a hook function modifies that list, and
+# needs to propagate that back to rest of this script, then the complete
+# modified list must be put in 'func_run_hooks_result' before returning.
+func_hookable func_options_prep
+func_options_prep ()
+{
+ $debug_cmd
+
+ # Option defaults:
+ opt_verbose=false
+ opt_warning_types=
+
+ func_run_hooks func_options_prep ${1+"$@"}
+ func_propagate_result func_run_hooks func_options_prep
+}
+
+
+# func_parse_options [ARG]...
+# ---------------------------
+# The main option parsing loop.
+func_hookable func_parse_options
+func_parse_options ()
+{
+ $debug_cmd
+
+ _G_parse_options_requote=false
+ # this just eases exit handling
+ while test $# -gt 0; do
+ # Defer to hook functions for initial option parsing, so they
+ # get priority in the event of reusing an option name.
+ func_run_hooks func_parse_options ${1+"$@"}
+ func_propagate_result func_run_hooks func_parse_options
+ if $func_propagate_result_result; then
+ eval set dummy "$func_parse_options_result"; shift
+ # Even though we may have changed "$@", we passed the "$@" array
+ # down into the hook and it quoted it for us (because we are in
+ # this if-branch). No need to quote it again.
+ _G_parse_options_requote=false
+ fi
+
+ # Break out of the loop if we already parsed every option.
+ test $# -gt 0 || break
+
+ # We expect that one of the options parsed in this function matches
+ # and thus we remove _G_opt from "$@" and need to re-quote.
+ _G_match_parse_options=:
+ _G_opt=$1
+ shift
+ case $_G_opt in
+ --debug|-x) debug_cmd='set -x'
+ func_echo "enabling shell trace mode" >&2
+ $debug_cmd
+ ;;
+
+ --no-warnings|--no-warning|--no-warn)
+ set dummy --warnings none ${1+"$@"}
+ shift
+ ;;
+
+ --warnings|--warning|-W)
+ if test $# = 0 && func_missing_arg $_G_opt; then
+ _G_parse_options_requote=:
+ break
+ fi
+ case " $warning_categories $1" in
+ *" $1 "*)
+ # trailing space prevents matching last $1 above
+ func_append_uniq opt_warning_types " $1"
+ ;;
+ *all)
+ opt_warning_types=$warning_categories
+ ;;
+ *none)
+ opt_warning_types=none
+ warning_func=:
+ ;;
+ *error)
+ opt_warning_types=$warning_categories
+ warning_func=func_fatal_error
+ ;;
+ *)
+ func_fatal_error \
+ "unsupported warning category: '$1'"
+ ;;
+ esac
+ shift
+ ;;
+
+ --verbose|-v) opt_verbose=: ;;
+ --version) func_version ;;
+ -\?|-h) func_usage ;;
+ --help) func_help ;;
+
+ # Separate optargs to long options (plugins may need this):
+ --*=*) func_split_equals "$_G_opt"
+ set dummy "$func_split_equals_lhs" \
+ "$func_split_equals_rhs" ${1+"$@"}
+ shift
+ ;;
+
+ # Separate optargs to short options:
+ -W*)
+ func_split_short_opt "$_G_opt"
+ set dummy "$func_split_short_opt_name" \
+ "$func_split_short_opt_arg" ${1+"$@"}
+ shift
+ ;;
+
+ # Separate non-argument short options:
+ -\?*|-h*|-v*|-x*)
+ func_split_short_opt "$_G_opt"
+ set dummy "$func_split_short_opt_name" \
+ "-$func_split_short_opt_arg" ${1+"$@"}
+ shift
+ ;;
+
+ --) _G_parse_options_requote=: ; break ;;
+ -*) func_fatal_help "unrecognised option: '$_G_opt'" ;;
+ *) set dummy "$_G_opt" ${1+"$@"}; shift
+ _G_match_parse_options=false
+ break
+ ;;
+ esac
+
+ if $_G_match_parse_options; then
+ _G_parse_options_requote=:
+ fi
+ done
+
+ if $_G_parse_options_requote; then
+ # save modified positional parameters for caller
+ func_quote eval ${1+"$@"}
+ func_parse_options_result=$func_quote_result
+ fi
+}
+
+
+# func_validate_options [ARG]...
+# ------------------------------
+# Perform any sanity checks on option settings and/or unconsumed
+# arguments.
+func_hookable func_validate_options
+func_validate_options ()
+{
+ $debug_cmd
+
+ # Display all warnings if -W was not given.
+ test -n "$opt_warning_types" || opt_warning_types=" $warning_categories"
+
+ func_run_hooks func_validate_options ${1+"$@"}
+ func_propagate_result func_run_hooks func_validate_options
+
+ # Bail if the options were screwed!
+ $exit_cmd $EXIT_FAILURE
+}
+
+
+
+## ----------------- ##
+## Helper functions. ##
+## ----------------- ##
+
+# This section contains the helper functions used by the rest of the
+# hookable option parser framework in ascii-betical order.
+
+
+# func_fatal_help ARG...
+# ----------------------
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+ $debug_cmd
+
+ eval \$ECHO \""Usage: $usage"\"
+ eval \$ECHO \""$fatal_help"\"
+ func_error ${1+"$@"}
+ exit $EXIT_FAILURE
+}
+
+
+# func_help
+# ---------
+# Echo long help message to standard output and exit.
+func_help ()
+{
+ $debug_cmd
+
+ func_usage_message
+ $ECHO "$long_help_message"
+ exit 0
+}
+
+
+# func_missing_arg ARGNAME
+# ------------------------
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+ $debug_cmd
+
+ func_error "Missing argument for '$1'."
+ exit_cmd=exit
+}
+
+
+# func_split_equals STRING
+# ------------------------
+# Set func_split_equals_lhs and func_split_equals_rhs shell variables
+# after splitting STRING at the '=' sign.
+test -z "$_G_HAVE_XSI_OPS" \
+ && (eval 'x=a/b/c;
+ test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
+ && _G_HAVE_XSI_OPS=yes
+
+if test yes = "$_G_HAVE_XSI_OPS"
+then
+ # This is an XSI compatible shell, allowing a faster implementation...
+ eval 'func_split_equals ()
+ {
+ $debug_cmd
+
+ func_split_equals_lhs=${1%%=*}
+ func_split_equals_rhs=${1#*=}
+ if test "x$func_split_equals_lhs" = "x$1"; then
+ func_split_equals_rhs=
+ fi
+ }'
+else
+ # ...otherwise fall back to using expr, which is often a shell builtin.
+ func_split_equals ()
+ {
+ $debug_cmd
+
+ func_split_equals_lhs=`expr "x$1" : 'x\([^=]*\)'`
+ func_split_equals_rhs=
+ test "x$func_split_equals_lhs=" = "x$1" \
+ || func_split_equals_rhs=`expr "x$1" : 'x[^=]*=\(.*\)$'`
+ }
+fi #func_split_equals
+
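+# An illustrative sketch (not part of the upstream script); the option
+# string is hypothetical.
+#
+#    func_split_equals '--mode=link'
+#    # func_split_equals_lhs now holds '--mode'
+#    # func_split_equals_rhs now holds 'link'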
+
+# func_split_short_opt SHORTOPT
+# -----------------------------
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+if test yes = "$_G_HAVE_XSI_OPS"
+then
+ # This is an XSI compatible shell, allowing a faster implementation...
+ eval 'func_split_short_opt ()
+ {
+ $debug_cmd
+
+ func_split_short_opt_arg=${1#??}
+ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
+ }'
+else
+ # ...otherwise fall back to using expr, which is often a shell builtin.
+ func_split_short_opt ()
+ {
+ $debug_cmd
+
+ func_split_short_opt_name=`expr "x$1" : 'x\(-.\)'`
+ func_split_short_opt_arg=`expr "x$1" : 'x-.\(.*\)$'`
+ }
+fi #func_split_short_opt
+
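+# An illustrative sketch (not part of the upstream script); the option
+# string is hypothetical.
+#
+#    func_split_short_opt '-Wall'
+#    # func_split_short_opt_name now holds '-W'
+#    # func_split_short_opt_arg  now holds 'all'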
+
+# func_usage
+# ----------
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+ $debug_cmd
+
+ func_usage_message
+ $ECHO "Run '$progname --help |${PAGER-more}' for full usage"
+ exit 0
+}
+
+
+# func_usage_message
+# ------------------
+# Echo short help message to standard output.
+func_usage_message ()
+{
+ $debug_cmd
+
+ eval \$ECHO \""Usage: $usage"\"
+ echo
+ $SED -n 's|^# ||
+ /^Written by/{
+ x;p;x
+ }
+ h
+ /^Written by/q' < "$progpath"
+ echo
+ eval \$ECHO \""$usage_message"\"
+}
+
+
+# func_version
+# ------------
+# Echo version message to standard output and exit.
+# The version message is extracted from the calling file's header
+# comments, with leading '# ' stripped:
+# 1. First display the progname and version
+# 2. Followed by the header comment line matching /^# Written by /
+# 3. Then a blank line followed by the first following line matching
+# /^# Copyright /
+# 4. Immediately followed by any lines between the previous matches,
+# except lines preceding the intervening completely blank line.
+# For example, see the header comments of this file.
+func_version ()
+{
+ $debug_cmd
+
+ printf '%s\n' "$progname $scriptversion"
+ $SED -n '
+ /^# Written by /!b
+ s|^# ||; p; n
+
+ :fwd2blnk
+ /./ {
+ n
+ b fwd2blnk
+ }
+ p; n
+
+ :holdwrnt
+ s|^# ||
+ s|^# *$||
+ /^Copyright /!{
+ /./H
+ n
+ b holdwrnt
+ }
+
+ s|\((C)\)[ 0-9,-]*[ ,-]\([1-9][0-9]* \)|\1 \2|
+ G
+ s|\(\n\)\n*|\1|g
+ p; q' < "$progpath"
+
+ exit $?
+}
+
+
+# Local variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-pattern: "30/scriptversion=%:y-%02m-%02d.%02H; # UTC"
+# time-stamp-time-zone: "UTC"
+# End:
+
+# Set a version string.
+scriptversion='(GNU libtool) 2.4.7.83-7b09-dirty'
+
+
+# func_echo ARG...
+# ----------------
+# Libtool also displays the current mode in messages, so override
+# funclib.sh func_echo with this custom definition.
+func_echo ()
+{
+ $debug_cmd
+
+ _G_message=$*
+
+ func_echo_IFS=$IFS
+ IFS=$nl
+ for _G_line in $_G_message; do
+ IFS=$func_echo_IFS
+ $ECHO "$progname${opt_mode+: $opt_mode}: $_G_line"
+ done
+ IFS=$func_echo_IFS
+}
+
+
+# func_warning ARG...
+# -------------------
+# Libtool warnings are not categorized, so override funclib.sh
+# func_warning with this simpler definition.
+func_warning ()
+{
+ $debug_cmd
+
+ $warning_func ${1+"$@"}
+}
+
+
+## ---------------- ##
+## Options parsing. ##
+## ---------------- ##
+
+# Hook in the functions to make sure our own options are parsed during
+# the option parsing loop.
+
+usage='$progpath [OPTION]... [MODE-ARG]...'
+
+# Short help message in response to '-h'.
+usage_message="Options:
+ --config show all configuration variables
+ --debug enable verbose shell tracing
+ -n, --dry-run display commands without modifying any files
+ --features display basic configuration information and exit
+ --mode=MODE use operation mode MODE
+ --no-warnings equivalent to '-Wnone'
+ --preserve-dup-deps don't remove duplicate dependency libraries
+ --quiet, --silent don't print informational messages
+ --tag=TAG use configuration variables from tag TAG
+ -v, --verbose print more informational messages than default
+ --version print version information
+ -W, --warnings=CATEGORY report the warnings falling in CATEGORY [all]
+ -h, --help, --help-all print short, long, or detailed help message
+"
+
+# Additional text appended to 'usage_message' in response to '--help'.
+func_help ()
+{
+ $debug_cmd
+
+ func_usage_message
+ $ECHO "$long_help_message
+
+MODE must be one of the following:
+
+ clean remove files from the build directory
+ compile compile a source file into a libtool object
+ execute automatically set library path, then run a program
+ finish complete the installation of libtool libraries
+ install install libraries or executables
+ link create a library or an executable
+ uninstall remove libraries from an installed directory
+
+MODE-ARGS vary depending on the MODE. When passed as first option,
+'--mode=MODE' may be abbreviated as 'MODE' or a unique abbreviation of that.
+Try '$progname --help --mode=MODE' for a more detailed description of MODE.
+
+When reporting a bug, please describe a test case to reproduce it and
+include the following information:
+
+ host-triplet: $host
+ shell: $SHELL
+ compiler: $LTCC
+ compiler flags: $LTCFLAGS
+ linker: $LD (gnu? $with_gnu_ld)
+ version: $progname (GNU libtool) 2.4.7.83-7b09-dirty
+ automake: `($AUTOMAKE --version) 2>/dev/null |$SED 1q`
+ autoconf: `($AUTOCONF --version) 2>/dev/null |$SED 1q`
+
+Report bugs to <bug-libtool@gnu.org>.
+GNU libtool home page: <https://www.gnu.org/s/libtool/>.
+General help using GNU software: <https://www.gnu.org/gethelp/>."
+ exit 0
+}
+
+
+# func_lo2o OBJECT-NAME
+# ---------------------
+# Transform OBJECT-NAME from a '.lo' suffix to the platform specific
+# object suffix.
+
+lo2o=s/\\.lo\$/.$objext/
+o2lo=s/\\.$objext\$/.lo/
+
+if test yes = "$_G_HAVE_XSI_OPS"; then
+ eval 'func_lo2o ()
+ {
+ case $1 in
+ *.lo) func_lo2o_result=${1%.lo}.$objext ;;
+ * ) func_lo2o_result=$1 ;;
+ esac
+ }'
+
+ # func_xform LIBOBJ-OR-SOURCE
+ # ---------------------------
+ # Transform LIBOBJ-OR-SOURCE from a '.o' or '.c' (or otherwise)
+ # suffix to a '.lo' libtool-object suffix.
+ eval 'func_xform ()
+ {
+ func_xform_result=${1%.*}.lo
+ }'
+else
+ # ...otherwise fall back to using sed.
+ func_lo2o ()
+ {
+ func_lo2o_result=`$ECHO "$1" | $SED "$lo2o"`
+ }
+
+ func_xform ()
+ {
+ func_xform_result=`$ECHO "$1" | $SED 's|\.[^.]*$|.lo|'`
+ }
+fi
+
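+# An illustrative sketch (not part of the upstream script); it assumes a
+# configured object extension of $objext=o, and the file names are
+# hypothetical.
+#
+#    func_lo2o 'foo.lo'    # func_lo2o_result  -> 'foo.o'
+#    func_xform 'foo.c'    # func_xform_result -> 'foo.lo'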
+
+# func_fatal_configuration ARG...
+# -------------------------------
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+ func_fatal_error ${1+"$@"} \
+ "See the $PACKAGE documentation for more information." \
+ "Fatal configuration error."
+}
+
+
+# func_config
+# -----------
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+ re_begincf='^# ### BEGIN LIBTOOL'
+ re_endcf='^# ### END LIBTOOL'
+
+ # Default configuration.
+ $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+ # Now print the configurations for the tags.
+ for tagname in $taglist; do
+ $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+ done
+
+ exit $?
+}
+
+
+# func_features
+# -------------
+# Display the features supported by this script.
+func_features ()
+{
+ echo "host: $host"
+ if test yes = "$build_libtool_libs"; then
+ echo "enable shared libraries"
+ else
+ echo "disable shared libraries"
+ fi
+ if test yes = "$build_old_libs"; then
+ echo "enable static libraries"
+ else
+ echo "disable static libraries"
+ fi
+
+ exit $?
+}
+
+
+# func_enable_tag TAGNAME
+# -----------------------
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag. We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+ # Global variable:
+ tagname=$1
+
+ re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+ re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+ sed_extractcf=/$re_begincf/,/$re_endcf/p
+
+ # Validate tagname.
+ case $tagname in
+ *[!-_A-Za-z0-9,/]*)
+ func_fatal_error "invalid tag name: $tagname"
+ ;;
+ esac
+
+ # Don't test for the "default" C tag, as we know it's
+ # there but not specially marked.
+ case $tagname in
+ CC) ;;
+ *)
+ if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+ taglist="$taglist $tagname"
+
+ # Evaluate the configuration. Be careful to quote the path
+ # and the sed script, to avoid splitting on whitespace, but
+          # also avoid non-portable quotes within backquotes within
+          # quotes; to manage both, we do it in 2 steps:
+ extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+ eval "$extractedcf"
+ else
+ func_error "ignoring unknown tag $tagname"
+ fi
+ ;;
+ esac
+}
+
+
+# func_check_version_match
+# ------------------------
+# Ensure that we are using m4 macros, and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+ if test "$package_revision" != "$macro_revision"; then
+ if test "$VERSION" != "$macro_version"; then
+ if test -z "$macro_version"; then
+ cat >&2 <<_LT_EOF
+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+ else
+ cat >&2 <<_LT_EOF
+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+ fi
+ else
+ cat >&2 <<_LT_EOF
+$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+ fi
+
+ exit $EXIT_MISMATCH
+ fi
+}
+
+
+# libtool_options_prep [ARG]...
+# -----------------------------
+# Preparation for options parsed by libtool.
+libtool_options_prep ()
+{
+  $debug_cmd
+
+ # Option defaults:
+ opt_config=false
+ opt_dlopen=
+ opt_dry_run=false
+ opt_help=false
+ opt_mode=
+ opt_preserve_dup_deps=false
+ opt_quiet=false
+
+ nonopt=
+ preserve_args=
+
+ _G_rc_lt_options_prep=:
+
+ # Shorthand for --mode=foo, only valid as the first argument
+ case $1 in
+ clean|clea|cle|cl)
+ shift; set dummy --mode clean ${1+"$@"}; shift
+ ;;
+ compile|compil|compi|comp|com|co|c)
+ shift; set dummy --mode compile ${1+"$@"}; shift
+ ;;
+ execute|execut|execu|exec|exe|ex|e)
+ shift; set dummy --mode execute ${1+"$@"}; shift
+ ;;
+ finish|finis|fini|fin|fi|f)
+ shift; set dummy --mode finish ${1+"$@"}; shift
+ ;;
+ install|instal|insta|inst|ins|in|i)
+ shift; set dummy --mode install ${1+"$@"}; shift
+ ;;
+ link|lin|li|l)
+ shift; set dummy --mode link ${1+"$@"}; shift
+ ;;
+ uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+ shift; set dummy --mode uninstall ${1+"$@"}; shift
+ ;;
+ *)
+ _G_rc_lt_options_prep=false
+ ;;
+ esac
+
+ if $_G_rc_lt_options_prep; then
+ # Pass back the list of options.
+ func_quote eval ${1+"$@"}
+ libtool_options_prep_result=$func_quote_result
+ fi
+}
+func_add_hook func_options_prep libtool_options_prep
+
+
+# libtool_parse_options [ARG]...
+# ---------------------------------
+# Provide handling for libtool specific options.
+libtool_parse_options ()
+{
+ $debug_cmd
+
+ _G_rc_lt_parse_options=false
+
+ # Perform our own loop to consume as many options as possible in
+ # each iteration.
+ while test $# -gt 0; do
+ _G_match_lt_parse_options=:
+ _G_opt=$1
+ shift
+ case $_G_opt in
+ --dry-run|--dryrun|-n)
+ opt_dry_run=:
+ ;;
+
+ --config) func_config ;;
+
+ --dlopen|-dlopen)
+ opt_dlopen="${opt_dlopen+$opt_dlopen
+}$1"
+ shift
+ ;;
+
+ --preserve-dup-deps)
+ opt_preserve_dup_deps=: ;;
+
+ --features) func_features ;;
+
+ --finish) set dummy --mode finish ${1+"$@"}; shift ;;
+
+ --help) opt_help=: ;;
+
+ --help-all) opt_help=': help-all' ;;
+
+ --mode) test $# = 0 && func_missing_arg $_G_opt && break
+ opt_mode=$1
+ case $1 in
+ # Valid mode arguments:
+ clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+ # Catch anything else as an error
+ *) func_error "invalid argument for $_G_opt"
+ exit_cmd=exit
+ break
+ ;;
+ esac
+ shift
+ ;;
+
+ --no-silent|--no-quiet)
+ opt_quiet=false
+ func_append preserve_args " $_G_opt"
+ ;;
+
+ --no-warnings|--no-warning|--no-warn)
+ opt_warning=false
+ func_append preserve_args " $_G_opt"
+ ;;
+
+ --no-verbose)
+ opt_verbose=false
+ func_append preserve_args " $_G_opt"
+ ;;
+
+ --silent|--quiet)
+ opt_quiet=:
+ opt_verbose=false
+ func_append preserve_args " $_G_opt"
+ ;;
+
+ --tag) test $# = 0 && func_missing_arg $_G_opt && break
+ opt_tag=$1
+ func_append preserve_args " $_G_opt $1"
+ func_enable_tag "$1"
+ shift
+ ;;
+
+ --verbose|-v) opt_quiet=false
+ opt_verbose=:
+ func_append preserve_args " $_G_opt"
+ ;;
+
+ # An option not handled by this hook function:
+ *) set dummy "$_G_opt" ${1+"$@"} ; shift
+ _G_match_lt_parse_options=false
+ break
+ ;;
+ esac
+ $_G_match_lt_parse_options && _G_rc_lt_parse_options=:
+ done
+
+ if $_G_rc_lt_parse_options; then
+ # save modified positional parameters for caller
+ func_quote eval ${1+"$@"}
+ libtool_parse_options_result=$func_quote_result
+ fi
+}
+func_add_hook func_parse_options libtool_parse_options
+
+
+
+# libtool_validate_options [ARG]...
+# ---------------------------------
+# Perform any sanity checks on option settings and/or unconsumed
+# arguments.
+libtool_validate_options ()
+{
+ # save first non-option argument
+ if test 0 -lt $#; then
+ nonopt=$1
+ shift
+ fi
+
+ # preserve --debug
+ test : = "$debug_cmd" || func_append preserve_args " --debug"
+
+ case $host_os in
+ # Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452
+ # see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788
+ cygwin* | mingw* | windows* | pw32* | cegcc* | solaris2* | os2*)
+ # don't eliminate duplications in $postdeps and $predeps
+ opt_duplicate_compiler_generated_deps=:
+ ;;
+ *)
+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+ ;;
+ esac
+
+ $opt_help || {
+ # Sanity checks first:
+ func_check_version_match
+
+ test yes != "$build_libtool_libs" \
+ && test yes != "$build_old_libs" \
+ && func_fatal_configuration "not configured to build any kind of library"
+
+ # Darwin sucks
+ eval std_shrext=\"$shrext_cmds\"
+
+ # Only execute mode is allowed to have -dlopen flags.
+ if test -n "$opt_dlopen" && test execute != "$opt_mode"; then
+ func_error "unrecognized option '-dlopen'"
+ $ECHO "$help" 1>&2
+ exit $EXIT_FAILURE
+ fi
+
+ # Change the help message to a mode-specific one.
+ generic_help=$help
+ help="Try '$progname --help --mode=$opt_mode' for more information."
+ }
+
+ # Pass back the unparsed argument list
+ func_quote eval ${1+"$@"}
+ libtool_validate_options_result=$func_quote_result
+}
+func_add_hook func_validate_options libtool_validate_options
+
+
+# Process options as early as possible so that --help and --version
+# can return quickly.
+func_options ${1+"$@"}
+eval set dummy "$func_options_result"; shift
+
+
+
+## ----------- ##
+## Main. ##
+## ----------- ##
+
+magic='%%%MAGIC variable%%%'
+magic_exe='%%%MAGIC EXE variable%%%'
+
+# Global variables.
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end. This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+ eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# func_generated_by_libtool_p
+# True iff stdin has been generated by Libtool. This function is only
+# a basic sanity check; it will hardly flush out determined imposters.
+func_generated_by_libtool_p ()
+{
+ $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_p file
+# True iff FILE is a libtool '.la' library or '.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+ test -f "$1" &&
+ $SED -e 4q "$1" 2>/dev/null | func_generated_by_libtool_p
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool '.la' library or '.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs. To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway. Works if 'file' does not exist.
+func_lalib_unsafe_p ()
+{
+ lalib_p=no
+ if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+ for lalib_p_l in 1 2 3 4
+ do
+ read lalib_p_line
+ case $lalib_p_line in
+ \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+ esac
+ done
+ exec 0<&5 5<&-
+ fi
+ test yes = "$lalib_p"
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+ test -f "$1" &&
+ $lt_truncate_bin < "$1" 2>/dev/null | func_generated_by_libtool_p
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+ func_ltwrapper_exec_suffix=
+ case $1 in
+ *.exe) ;;
+ *) func_ltwrapper_exec_suffix=.exe ;;
+ esac
+ $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes FILE is an ltwrapper executable, and uses FILE to determine
+# the appropriate file name for a temporary ltwrapper script.
+func_ltwrapper_scriptname ()
+{
+ func_dirname_and_basename "$1" "" "."
+ func_stripname '' '.exe' "$func_basename_result"
+ func_ltwrapper_scriptname_result=$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper
+}
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+ func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+ $debug_cmd
+
+ save_ifs=$IFS; IFS='~'
+ for cmd in $1; do
+ IFS=$sp$nl
+ eval cmd=\"$cmd\"
+ IFS=$save_ifs
+ func_show_eval "$cmd" "${2-:}"
+ done
+ IFS=$save_ifs
+}
+
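+# An illustrative sketch (not part of the upstream script); the commands
+# and fail expression are hypothetical.
+#
+#    func_execute_cmds 'echo first~echo second' 'exit 1'
+#    # Shows and runs 'echo first', then 'echo second'; if either command
+#    # fails, 'exit 1' is evaluated.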
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)! Also, sourcing
+# 'FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+ $debug_cmd
+
+ case $1 in
+ */* | *\\*) . "$1" ;;
+ *) . "./$1" ;;
+ esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot. Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+ func_resolve_sysroot_result=$1
+ case $func_resolve_sysroot_result in
+ =*)
+ func_stripname '=' '' "$func_resolve_sysroot_result"
+ func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+ ;;
+ esac
+}
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+ case $lt_sysroot:$1 in
+ ?*:"$lt_sysroot"*)
+ func_stripname "$lt_sysroot" '' "$1"
+ func_replace_sysroot_result='='$func_stripname_result
+ ;;
+ *)
+ # Including no sysroot.
+ func_replace_sysroot_result=$1
+ ;;
+ esac
+}
+
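+# An illustrative sketch (not part of the upstream script); it assumes a
+# hypothetical sysroot of lt_sysroot=/opt/sysroot.
+#
+#    func_resolve_sysroot '=usr/lib'
+#    # func_resolve_sysroot_result -> '/opt/sysroot/usr/lib'
+#
+#    func_replace_sysroot '/opt/sysroot/usr/lib'
+#    # func_replace_sysroot_result -> '=usr/lib'
+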
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+ $debug_cmd
+
+ if test -n "$available_tags" && test -z "$tagname"; then
+ CC_quoted=
+ for arg in $CC; do
+ func_append_quoted CC_quoted "$arg"
+ done
+ CC_expanded=`func_echo_all $CC`
+ CC_quoted_expanded=`func_echo_all $CC_quoted`
+ case $@ in
+ # Blanks in the command may have been stripped by the calling shell,
+ # but not from the CC environment variable when configure was run.
+ " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+ # Blanks at the start of $base_compile will cause this to fail
+ # if we don't check for them as well.
+ *)
+ for z in $available_tags; do
+ if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+ # Evaluate the configuration.
+ eval "`$SED -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+ CC_quoted=
+ for arg in $CC; do
+ # Double-quote args containing other shell metacharacters.
+ func_append_quoted CC_quoted "$arg"
+ done
+ CC_expanded=`func_echo_all $CC`
+ CC_quoted_expanded=`func_echo_all $CC_quoted`
+ case "$@ " in
+ " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+ # The compiler in the base compile command matches
+ # the one in the tagged configuration.
+ # Assume this is the tagged configuration we want.
+ tagname=$z
+ break
+ ;;
+ esac
+ fi
+ done
+ # If $tagname still isn't set, then no tagged configuration
+ # was found and let the user know that the "--tag" command
+ # line option must be used.
+ if test -z "$tagname"; then
+ func_echo "unable to infer tagged configuration"
+ func_fatal_error "specify a tag with '--tag'"
+# else
+# func_verbose "using $tagname tagged configuration"
+ fi
+ ;;
+ esac
+ fi
+}
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+ write_libobj=$1
+ if test yes = "$build_libtool_libs"; then
+ write_lobj=\'$2\'
+ else
+ write_lobj=none
+ fi
+
+ if test yes = "$build_old_libs"; then
+ write_oldobj=\'$3\'
+ else
+ write_oldobj=none
+ fi
+
+ $opt_dry_run || {
+ cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+ $MV "${write_libobj}T" "$write_libobj"
+ }
+}
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, windows, cygwin, or some other w32 environment.  Relies
+# on a correctly configured wine environment being available, with the
+# winepath program in $build's $PATH.
+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+ $debug_cmd
+
+ func_convert_core_file_wine_to_w32_result=$1
+ if test -n "$1"; then
+ # Unfortunately, winepath does not exit with a non-zero error code, so we
+ # are forced to check the contents of stdout. On the other hand, if the
+ # command is not found, the shell will set an exit code of 127 and print
+ # *an error message* to stdout. So we must check for both error code of
+ # zero AND non-empty stdout, which explains the odd construction:
+ func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+ if test "$?" -eq 0 && test -n "$func_convert_core_file_wine_to_w32_tmp"; then
+ func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+ $SED -e "$sed_naive_backslashify"`
+ else
+ func_convert_core_file_wine_to_w32_result=
+ fi
+ fi
+}
+# end: func_convert_core_file_wine_to_w32
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, windows, cygwin, or some other w32 environment.  Relies on a
+# correctly configured wine environment being available, with the winepath
+# program in $build's $PATH.  Assumes ARG has no leading or trailing path
+# separator characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+ $debug_cmd
+
+ # unfortunately, winepath doesn't convert paths, only file names
+ func_convert_core_path_wine_to_w32_result=
+ if test -n "$1"; then
+ oldIFS=$IFS
+ IFS=:
+ for func_convert_core_path_wine_to_w32_f in $1; do
+ IFS=$oldIFS
+ func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+ if test -n "$func_convert_core_file_wine_to_w32_result"; then
+ if test -z "$func_convert_core_path_wine_to_w32_result"; then
+ func_convert_core_path_wine_to_w32_result=$func_convert_core_file_wine_to_w32_result
+ else
+ func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+ fi
+ fi
+ done
+ IFS=$oldIFS
+ fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH.  This is used
+# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+ $debug_cmd
+
+ if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+ func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+ if test "$?" -ne 0; then
+ # on failure, ensure result is empty
+ func_cygpath_result=
+ fi
+ else
+ func_cygpath_result=
+ func_error "LT_CYGPATH is empty or specifies non-existent file: '$LT_CYGPATH'"
+ fi
+}
+#end: func_cygpath
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format. Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+ $debug_cmd
+
+ # awkward: cmd appends spaces to result
+ func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+ $SED -e 's/[ ]*$//' -e "$sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+ $debug_cmd
+
+ if test -z "$2" && test -n "$1"; then
+ func_error "Could not determine host file name corresponding to"
+ func_error " '$1'"
+ func_error "Continuing, but uninstalled executables may not work."
+ # Fallback:
+ func_to_host_file_result=$1
+ fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_file_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+ $debug_cmd
+
+ if test -z "$4" && test -n "$3"; then
+ func_error "Could not determine the host path corresponding to"
+ func_error " '$3'"
+ func_error "Continuing, but uninstalled executables may not work."
+ # Fallback. This is a deliberately simplistic "conversion" and
+ # should not be "improved". See libtool.info.
+ if test "x$1" != "x$2"; then
+ lt_replace_pathsep_chars="s|$1|$2|g"
+ func_to_host_path_result=`echo "$3" |
+ $SED -e "$lt_replace_pathsep_chars"`
+ else
+ func_to_host_path_result=$3
+ fi
+ fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+ $debug_cmd
+
+ case $4 in
+ $1 ) func_to_host_path_result=$3$func_to_host_path_result
+ ;;
+ esac
+ case $4 in
+ $2 ) func_append func_to_host_path_result "$3"
+ ;;
+ esac
+}
+# end func_convert_path_front_back_pathsep
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via '$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+ $debug_cmd
+
+ $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result. If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+ $debug_cmd
+
+ case ,$2, in
+ *,"$to_tool_file_cmd",*)
+ func_to_tool_file_result=$1
+ ;;
+ *)
+ $to_tool_file_cmd "$1"
+ func_to_tool_file_result=$func_to_host_file_result
+ ;;
+ esac
+}
+# end func_to_tool_file
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+ func_to_host_file_result=$1
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper. Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+ $debug_cmd
+
+ func_to_host_file_result=$1
+ if test -n "$1"; then
+ func_convert_core_msys_to_w32 "$1"
+ func_to_host_file_result=$func_convert_core_msys_to_w32_result
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format. Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+ $debug_cmd
+
+ func_to_host_file_result=$1
+ if test -n "$1"; then
+ # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+ # LT_CYGPATH in this case.
+ func_to_host_file_result=`cygpath -m "$1"`
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format. Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+ $debug_cmd
+
+ func_to_host_file_result=$1
+ if test -n "$1"; then
+ func_convert_core_file_wine_to_w32 "$1"
+ func_to_host_file_result=$func_convert_core_file_wine_to_w32_result
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+ $debug_cmd
+
+ func_to_host_file_result=$1
+ if test -n "$1"; then
+ func_convert_core_msys_to_w32 "$1"
+ func_cygpath -u "$func_convert_core_msys_to_w32_result"
+ func_to_host_file_result=$func_cygpath_result
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set. Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+ $debug_cmd
+
+ func_to_host_file_result=$1
+ if test -n "$1"; then
+ # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+ func_convert_core_file_wine_to_w32 "$1"
+ func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+ func_to_host_file_result=$func_cygpath_result
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via '$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format. If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+# file name conversion function : func_convert_file_X_to_Y ()
+# path conversion function : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination the 'X_to_Y' value is the
+# same. If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+ $debug_cmd
+
+ if test -z "$to_host_path_cmd"; then
+ func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+ to_host_path_cmd=func_convert_path_$func_stripname_result
+ fi
+}
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+ $debug_cmd
+
+ func_init_to_host_path_cmd
+ $to_host_path_cmd "$1"
+}
+# end func_to_host_path
+
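+# An illustrative sketch (not part of the upstream script); it assumes a
+# Cygwin $build, i.e. to_host_file_cmd=func_convert_file_cygwin_to_w32,
+# so that func_init_to_host_path_cmd derives the matching path function.
+#
+#    func_to_host_path '/usr/bin:/usr/local/bin'
+#    # to_host_path_cmd is now func_convert_path_cygwin_to_w32, and
+#    # func_to_host_path_result holds the ';'-separated w32 path.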
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+ func_to_host_path_result=$1
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper. Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+ $debug_cmd
+
+ func_to_host_path_result=$1
+ if test -n "$1"; then
+ # Remove leading and trailing path separator characters from ARG. MSYS
+ # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+ # and winepath ignores them completely.
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+ func_to_host_path_result=$func_convert_core_msys_to_w32_result
+ func_convert_path_check : ";" \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+ fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format. Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+ $debug_cmd
+
+ func_to_host_path_result=$1
+ if test -n "$1"; then
+ # See func_convert_path_msys_to_w32:
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+ func_convert_path_check : ";" \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+ fi
+}
+# end func_convert_path_cygwin_to_w32
+
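+# Illustrative sketch (not part of the upstream script): 'cygpath -m -p'
+# converts a whole colon-separated path list in one call. On a Cygwin host,
+# with hypothetical directories, the result looks roughly like this:
+#
+#   cygpath -m -p '/home/user/lib:/opt/foo/lib'
+#   # -> C:/cygwin64/home/user/lib;C:/cygwin64/opt/foo/lib
+#
+# i.e. each entry is rewritten to w32 form and ':' becomes ';', which is what
+# func_convert_path_check verifies above.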
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format. Requires a wine environment and
+# a working winepath. Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+ $debug_cmd
+
+ func_to_host_path_result=$1
+ if test -n "$1"; then
+ # See func_convert_path_msys_to_w32:
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+ func_to_host_path_result=$func_convert_core_path_wine_to_w32_result
+ func_convert_path_check : ";" \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+ fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+ $debug_cmd
+
+ func_to_host_path_result=$1
+ if test -n "$1"; then
+ # See func_convert_path_msys_to_w32:
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+ func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+ func_to_host_path_result=$func_cygpath_result
+ func_convert_path_check : : \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+ fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in
+# a wine environment, working winepath, and LT_CYGPATH set. Returns result in
+# func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+ $debug_cmd
+
+ func_to_host_path_result=$1
+ if test -n "$1"; then
+ # Remove leading and trailing path separator characters from
+ # ARG. msys behavior is inconsistent here, cygpath turns them
+ # into '.;' and ';.', and winepath ignores them completely.
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+ func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+ func_to_host_path_result=$func_cygpath_result
+ func_convert_path_check : : \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+ fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_dll_def_p FILE
+# True iff FILE is a Windows DLL '.def' file.
+# Keep in sync with _LT_DLL_DEF_P in libtool.m4
+func_dll_def_p ()
+{
+ $debug_cmd
+
+ func_dll_def_p_tmp=`$SED -n \
+ -e 's/^[ ]*//' \
+ -e '/^\(;.*\)*$/d' \
+ -e 's/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p' \
+ -e q \
+ "$1"`
+ test DEF = "$func_dll_def_p_tmp"
+}
+
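+# Illustrative sketch (not upstream libtool code): func_dll_def_p only
+# inspects the first non-comment line of FILE. A hypothetical module
+# definition file such as
+#
+#   ; comment lines start with a semicolon
+#   LIBRARY locusts
+#   EXPORTS
+#       locusts_swarm
+#
+# is detected because its first significant line begins with LIBRARY (or
+# EXPORTS), which the sed script above rewrites to the marker string 'DEF'.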
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+ $debug_cmd
+
+ # Get the compilation command and the source file.
+ base_compile=
+ srcfile=$nonopt # always keep a non-empty value in "srcfile"
+ suppress_opt=yes
+ suppress_output=
+ arg_mode=normal
+ libobj=
+ later=
+ pie_flag=
+
+ for arg
+ do
+ case $arg_mode in
+ arg )
+ # do not "continue". Instead, add this to base_compile
+ lastarg=$arg
+ arg_mode=normal
+ ;;
+
+ target )
+ libobj=$arg
+ arg_mode=normal
+ continue
+ ;;
+
+ normal )
+ # Accept any command-line options.
+ case $arg in
+ -o)
+ test -n "$libobj" && \
+ func_fatal_error "you cannot specify '-o' more than once"
+ arg_mode=target
+ continue
+ ;;
+
+ -pie | -fpie | -fPIE)
+ func_append pie_flag " $arg"
+ continue
+ ;;
+
+ -shared | -static | -prefer-pic | -prefer-non-pic)
+ func_append later " $arg"
+ continue
+ ;;
+
+ -no-suppress)
+ suppress_opt=no
+ continue
+ ;;
+
+ -Xcompiler)
+ arg_mode=arg # the next one goes into the "base_compile" arg list
+ continue # The current "srcfile" will either be retained or
+ ;; # replaced later. I would guess that would be a bug.
+
+ -Wc,*)
+ func_stripname '-Wc,' '' "$arg"
+ args=$func_stripname_result
+ lastarg=
+ save_ifs=$IFS; IFS=,
+ for arg in $args; do
+ IFS=$save_ifs
+ func_append_quoted lastarg "$arg"
+ done
+ IFS=$save_ifs
+ func_stripname ' ' '' "$lastarg"
+ lastarg=$func_stripname_result
+
+ # Add the arguments to base_compile.
+ func_append base_compile " $lastarg"
+ continue
+ ;;
+
+ *)
+ # Accept the current argument as the source file.
+ # The previous "srcfile" becomes the current argument.
+ #
+ lastarg=$srcfile
+ srcfile=$arg
+ ;;
+ esac # case $arg
+ ;;
+ esac # case $arg_mode
+
+ # Aesthetically quote the previous argument.
+ func_append_quoted base_compile "$lastarg"
+ done # for arg
+
+ case $arg_mode in
+ arg)
+      func_fatal_error "you must specify an argument for -Xcompiler"
+ ;;
+ target)
+ func_fatal_error "you must specify a target with '-o'"
+ ;;
+ *)
+ # Get the name of the library object.
+ test -z "$libobj" && {
+ func_basename "$srcfile"
+ libobj=$func_basename_result
+ }
+ ;;
+ esac
+
+ # Recognize several different file suffixes.
+ # If the user specifies -o file.o, it is replaced with file.lo
+ case $libobj in
+ *.[cCFSifmso] | \
+ *.ada | *.adb | *.ads | *.asm | \
+ *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+ *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
+ func_xform "$libobj"
+ libobj=$func_xform_result
+ ;;
+ esac
+
+ case $libobj in
+ *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+ *)
+ func_fatal_error "cannot determine name of library object from '$libobj'"
+ ;;
+ esac
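+
+  # Illustrative note (not part of the upstream script): for a hypothetical
+  # input, the three names involved at this point relate as follows:
+  #
+  #   srcfile = swarm.c     (source file passed on the command line)
+  #   libobj  = swarm.lo    (libtool object, via func_xform)
+  #   obj     = swarm.o     (real object file name, via func_lo2o)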
+
+ func_infer_tag $base_compile
+
+ for arg in $later; do
+ case $arg in
+ -shared)
+ test yes = "$build_libtool_libs" \
+ || func_fatal_configuration "cannot build a shared library"
+ build_old_libs=no
+ continue
+ ;;
+
+ -static)
+ build_libtool_libs=no
+ build_old_libs=yes
+ continue
+ ;;
+
+ -prefer-pic)
+ pic_mode=yes
+ continue
+ ;;
+
+ -prefer-non-pic)
+ pic_mode=no
+ continue
+ ;;
+ esac
+ done
+
+ func_quote_arg pretty "$libobj"
+ test "X$libobj" != "X$func_quote_arg_result" \
+ && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \
+ && func_warning "libobj name '$libobj' may not contain shell special characters."
+ func_dirname_and_basename "$obj" "/" ""
+ objname=$func_basename_result
+ xdir=$func_dirname_result
+ lobj=$xdir$objdir/$objname
+
+ test -z "$base_compile" && \
+ func_fatal_help "you must specify a compilation command"
+
+ # Delete any leftover library objects.
+ if test yes = "$build_old_libs"; then
+ removelist="$obj $lobj $libobj ${libobj}T"
+ else
+ removelist="$lobj $libobj ${libobj}T"
+ fi
+
+ # On Cygwin there's no "real" PIC flag so we must build both object types
+ case $host_os in
+ cygwin* | mingw* | windows* | pw32* | os2* | cegcc*)
+ pic_mode=default
+ ;;
+ esac
+ if test no = "$pic_mode" && test pass_all != "$deplibs_check_method"; then
+ # non-PIC code in shared libraries is not supported
+ pic_mode=default
+ fi
+
+ # Calculate the filename of the output object if compiler does
+ # not support -o with -c
+ if test no = "$compiler_c_o"; then
+ output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.$objext
+ lockfile=$output_obj.lock
+ else
+ output_obj=
+ need_locks=no
+ lockfile=
+ fi
+
+ # Lock this critical section if it is needed
+  # We use this script file to make the lock link; it avoids creating a new file
+ if test yes = "$need_locks"; then
+ until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+ func_echo "Waiting for $lockfile to be removed"
+ sleep 2
+ done
+ elif test warn = "$need_locks"; then
+ if test -f "$lockfile"; then
+ $ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support '-c' and '-o' together. If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+ func_append removelist " $output_obj"
+ $ECHO "$srcfile" > "$lockfile"
+ fi
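+
+  # Illustrative sketch (not upstream libtool code): the lock above relies on
+  # 'ln' failing atomically when its target already exists. A minimal
+  # standalone version of the same idea, with a hypothetical lock file:
+  #
+  #   until ln "$0" /tmp/demo.lock 2>/dev/null; do
+  #     echo "waiting for /tmp/demo.lock to be removed"; sleep 2
+  #   done
+  #   # ... critical section ...
+  #   rm -f /tmp/demo.lock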
+
+ $opt_dry_run || $RM $removelist
+ func_append removelist " $lockfile"
+ trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+ func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+ srcfile=$func_to_tool_file_result
+ func_quote_arg pretty "$srcfile"
+ qsrcfile=$func_quote_arg_result
+
+ # Only build a PIC object if we are building libtool libraries.
+ if test yes = "$build_libtool_libs"; then
+ # Without this assignment, base_compile gets emptied.
+ fbsd_hideous_sh_bug=$base_compile
+
+ if test no != "$pic_mode"; then
+ command="$base_compile $qsrcfile $pic_flag"
+ else
+ # Don't build PIC code
+ command="$base_compile $qsrcfile"
+ fi
+
+ func_mkdir_p "$xdir$objdir"
+
+ if test -z "$output_obj"; then
+ # Place PIC objects in $objdir
+ func_append command " -o $lobj"
+ fi
+
+ func_show_eval_locale "$command" \
+ 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+ if test warn = "$need_locks" &&
+ test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+ $ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support '-c' and '-o' together. If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+
+ # Just move the object if needed, then go on to compile the next one
+ if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+ func_show_eval '$MV "$output_obj" "$lobj"' \
+ 'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+ fi
+
+ # Allow error messages only from the first compilation.
+ if test yes = "$suppress_opt"; then
+ suppress_output=' >/dev/null 2>&1'
+ fi
+ fi
+
+ # Only build a position-dependent object if we build old libraries.
+ if test yes = "$build_old_libs"; then
+ if test yes != "$pic_mode"; then
+ # Don't build PIC code
+ command="$base_compile $qsrcfile$pie_flag"
+ else
+ command="$base_compile $qsrcfile $pic_flag"
+ fi
+ if test yes = "$compiler_c_o"; then
+ func_append command " -o $obj"
+ fi
+
+ # Suppress compiler output if we already did a PIC compilation.
+ func_append command "$suppress_output"
+ func_show_eval_locale "$command" \
+ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+ if test warn = "$need_locks" &&
+ test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+ $ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support '-c' and '-o' together. If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+
+ # Just move the object if needed
+ if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+ func_show_eval '$MV "$output_obj" "$obj"' \
+ 'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+ fi
+ fi
+
+ $opt_dry_run || {
+ func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+ # Unlock the critical section if it was locked
+ if test no != "$need_locks"; then
+ removelist=$lockfile
+ $RM "$lockfile"
+ fi
+ }
+
+ exit $EXIT_SUCCESS
+}
+
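+# Illustrative example (not part of the upstream script): a typical
+# compile-mode invocation for a hypothetical build of this project would be
+#
+#   ./libtool --mode=compile gcc -c src/locusts.c -o src/locusts.lo
+#
+# which writes src/locusts.lo, a PIC object under src/.libs/, and (unless
+# -shared was given) a non-PIC src/locusts.o.
+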
+$opt_help || {
+ test compile = "$opt_mode" && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+ # We need to display help for each of the modes.
+ case $opt_mode in
+ "")
+ # Generic help is extracted from the usage comments
+ # at the start of this file.
+ func_help
+ ;;
+
+ clean)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+ ;;
+
+ compile)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+ -o OUTPUT-FILE set the output file name to OUTPUT-FILE
+ -no-suppress do not suppress compiler output for multiple passes
+ -prefer-pic try to build PIC objects only
+ -prefer-non-pic try to build non-PIC objects only
+ -shared do not build a '.o' file suitable for static linking
+ -static only build a '.o' file suitable for static linking
+ -Wc,FLAG
+ -Xcompiler FLAG pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a 'standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix '.c' with the
+library object suffix, '.lo'."
+ ;;
+
+ execute)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+ -dlopen FILE add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to '-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+ ;;
+
+ finish)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges. Use
+the '--dry-run' option if you just want to see what would be executed."
+ ;;
+
+ install)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command. The first component should be
+either the 'install' or 'cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+ -inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+ ;;
+
+ link)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+ -all-static do not do any dynamic linking at all
+ -avoid-version do not add a version suffix if possible
+ -bindir BINDIR specify path to binaries directory (for systems where
+ libraries must be found in the PATH setting at runtime)
+ -dlopen FILE '-dlpreopen' FILE if it cannot be dlopened at runtime
+ -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
+ -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+ -export-symbols SYMFILE
+ try to export only the symbols listed in SYMFILE
+ -export-symbols-regex REGEX
+ try to export only the symbols matching REGEX
+ -LLIBDIR search LIBDIR for required installed libraries
+ -lNAME OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+ -no-fast-install disable the fast-install mode
+ -no-install link a not-installable executable
+ -no-undefined declare that a library does not refer to external symbols
+ -o OUTPUT-FILE create OUTPUT-FILE from the specified objects
+ -objectlist FILE use a list of object files found in FILE to specify objects
+ -os2dllname NAME force a short DLL name on OS/2 (no effect on other OSes)
+ -precious-files-regex REGEX
+ don't remove output files matching REGEX
+ -release RELEASE specify package release information
+ -rpath LIBDIR the created library will eventually be installed in LIBDIR
+ -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
+ -shared only do dynamic linking of libtool libraries
+ -shrext SUFFIX override the standard shared library file extension
+ -static do not do any dynamic linking of uninstalled libtool libraries
+ -static-libtool-libs
+ do not do any dynamic linking of libtool libraries
+ -version-info CURRENT[:REVISION[:AGE]]
+ specify library version info [each variable defaults to 0]
+ -weak LIBNAME declare that the target provides the LIBNAME interface
+ -Wc,FLAG
+ -Xcompiler FLAG pass linker-specific FLAG directly to the compiler
+ -Wa,FLAG
+ -Xassembler FLAG pass linker-specific FLAG directly to the assembler
+ -Wl,FLAG
+ -Xlinker FLAG pass linker-specific FLAG directly to the linker
+ -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with '-') are ignored.
+
+Every other argument is treated as a filename. Files ending in '.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in '.la', then a libtool library is created,
+only library objects ('.lo' files) may be specified, and '-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in '.a' or '.lib', then a standard library is created
+using 'ar' and 'ranlib', or on Windows using 'lib'.
+
+If OUTPUT-FILE ends in '.lo' or '.$objext', then a reloadable object file
+is created, otherwise an executable program is created."
+ ;;
+
+ uninstall)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+ ;;
+
+ *)
+ func_fatal_help "invalid operation mode '$opt_mode'"
+ ;;
+ esac
+
+ echo
+ $ECHO "Try '$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+ if test : = "$opt_help"; then
+ func_mode_help
+ else
+ {
+ func_help noexit
+ for opt_mode in compile link execute install finish uninstall clean; do
+ func_mode_help
+ done
+ } | $SED -n '1p; 2,$s/^Usage:/ or: /p'
+ {
+ func_help noexit
+ for opt_mode in compile link execute install finish uninstall clean; do
+ echo
+ func_mode_help
+ done
+ } |
+ $SED '1d
+ /^When reporting/,/^Report/{
+ H
+ d
+ }
+ $x
+ /information about other modes/d
+ /more detailed .*MODE/d
+ s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+ fi
+ exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+ $debug_cmd
+
+ # The first argument is the command name.
+ cmd=$nonopt
+ test -z "$cmd" && \
+ func_fatal_help "you must specify a COMMAND"
+
+ # Handle -dlopen flags immediately.
+ for file in $opt_dlopen; do
+ test -f "$file" \
+ || func_fatal_help "'$file' is not a file"
+
+ dir=
+ case $file in
+ *.la)
+ func_resolve_sysroot "$file"
+ file=$func_resolve_sysroot_result
+
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "'$file' is not a valid libtool archive"
+
+ # Read the libtool library.
+ dlname=
+ library_names=
+ func_source "$file"
+
+ # Skip this library if it cannot be dlopened.
+ if test -z "$dlname"; then
+ # Warn if it was a shared library.
+ test -n "$library_names" && \
+ func_warning "'$file' was not linked with '-export-dynamic'"
+ continue
+ fi
+
+ func_dirname "$file" "" "."
+ dir=$func_dirname_result
+
+ if test -f "$dir/$objdir/$dlname"; then
+ func_append dir "/$objdir"
+ else
+ if test ! -f "$dir/$dlname"; then
+ func_fatal_error "cannot find '$dlname' in '$dir' or '$dir/$objdir'"
+ fi
+ fi
+ ;;
+
+ *.lo)
+ # Just add the directory containing the .lo file.
+ func_dirname "$file" "" "."
+ dir=$func_dirname_result
+ ;;
+
+ *)
+ func_warning "'-dlopen' is ignored for non-libtool libraries and objects"
+ continue
+ ;;
+ esac
+
+ # Get the absolute pathname.
+ absdir=`cd "$dir" && pwd`
+ test -n "$absdir" && dir=$absdir
+
+ # Now add the directory to shlibpath_var.
+ if eval "test -z \"\$$shlibpath_var\""; then
+ eval "$shlibpath_var=\"\$dir\""
+ else
+ eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+ fi
+ done
+
+ # This variable tells wrapper scripts just to set shlibpath_var
+ # rather than running their programs.
+ libtool_execute_magic=$magic
+
+ # Check if any of the arguments is a wrapper script.
+ args=
+ for file
+ do
+ case $file in
+ -* | *.la | *.lo ) ;;
+ *)
+ # Do a test to see if this is really a libtool program.
+ if func_ltwrapper_script_p "$file"; then
+ func_source "$file"
+ # Transform arg to wrapped name.
+ file=$progdir/$program
+ elif func_ltwrapper_executable_p "$file"; then
+ func_ltwrapper_scriptname "$file"
+ func_source "$func_ltwrapper_scriptname_result"
+ # Transform arg to wrapped name.
+ file=$progdir/$program
+ fi
+ ;;
+ esac
+ # Quote arguments (to preserve shell metacharacters).
+ func_append_quoted args "$file"
+ done
+
+ if $opt_dry_run; then
+ # Display what would be done.
+ if test -n "$shlibpath_var"; then
+ eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+ echo "export $shlibpath_var"
+ fi
+ $ECHO "$cmd$args"
+ exit $EXIT_SUCCESS
+ else
+ if test -n "$shlibpath_var"; then
+ # Export the shlibpath_var.
+ eval "export $shlibpath_var"
+ fi
+
+ # Restore saved environment variables
+ for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+ do
+ eval "if test \"\${save_$lt_var+set}\" = set; then
+ $lt_var=\$save_$lt_var; export $lt_var
+ else
+ $lt_unset $lt_var
+ fi"
+ done
+
+ # Now prepare to actually exec the command.
+ exec_cmd=\$cmd$args
+ fi
+}
+
+test execute = "$opt_mode" && func_mode_execute ${1+"$@"}
+
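+# Illustrative example (not upstream libtool code): execute mode lets an
+# uninstalled program run against uninstalled libraries, e.g. (with
+# hypothetical archive and binary names)
+#
+#   ./libtool --mode=execute -dlopen ./libocusts.la gdb ./test_locusts
+#
+# The -dlopen flag prepends the library's .libs directory to the shared
+# library path before the command is exec'd.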
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+ $debug_cmd
+
+ libs=
+ libdirs=
+ admincmds=
+
+ for opt in "$nonopt" ${1+"$@"}
+ do
+ if test -d "$opt"; then
+ func_append libdirs " $opt"
+
+ elif test -f "$opt"; then
+ if func_lalib_unsafe_p "$opt"; then
+ func_append libs " $opt"
+ else
+ func_warning "'$opt' is not a valid libtool archive"
+ fi
+
+ else
+ func_fatal_error "invalid argument '$opt'"
+ fi
+ done
+
+ if test -n "$libs"; then
+ if test -n "$lt_sysroot"; then
+ sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+ sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+ else
+ sysroot_cmd=
+ fi
+
+ # Remove sysroot references
+ if $opt_dry_run; then
+ for lib in $libs; do
+ echo "removing references to $lt_sysroot and '=' prefixes from $lib"
+ done
+ else
+ tmpdir=`func_mktempdir`
+ for lib in $libs; do
+ $SED -e "$sysroot_cmd s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+ > $tmpdir/tmp-la
+ mv -f $tmpdir/tmp-la $lib
+ done
+ ${RM}r "$tmpdir"
+ fi
+ fi
+
+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+ for libdir in $libdirs; do
+ if test -n "$finish_cmds"; then
+ # Do each command in the finish commands.
+ func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+ fi
+ if test -n "$finish_eval"; then
+ # Do the single finish_eval.
+ eval cmds=\"$finish_eval\"
+ $opt_dry_run || eval "$cmds" || func_append admincmds "
+ $cmds"
+ fi
+ done
+ fi
+
+ # Exit here if they wanted silent mode.
+ $opt_quiet && exit $EXIT_SUCCESS
+
+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+ echo "----------------------------------------------------------------------"
+ echo "Libraries have been installed in:"
+ for libdir in $libdirs; do
+ $ECHO " $libdir"
+ done
+ echo
+ echo "If you ever happen to want to link against installed libraries"
+ echo "in a given directory, LIBDIR, you must either use libtool, and"
+ echo "specify the full pathname of the library, or use the '-LLIBDIR'"
+ echo "flag during linking and do at least one of the following:"
+ if test -n "$shlibpath_var"; then
+ echo " - add LIBDIR to the '$shlibpath_var' environment variable"
+ echo " during execution"
+ fi
+ if test -n "$runpath_var"; then
+ echo " - add LIBDIR to the '$runpath_var' environment variable"
+ echo " during linking"
+ fi
+ if test -n "$hardcode_libdir_flag_spec"; then
+ libdir=LIBDIR
+ eval flag=\"$hardcode_libdir_flag_spec\"
+
+ $ECHO " - use the '$flag' linker flag"
+ fi
+ if test -n "$admincmds"; then
+ $ECHO " - have your system administrator run these commands:$admincmds"
+ fi
+ if test -f /etc/ld.so.conf; then
+ echo " - have your system administrator add LIBDIR to '/etc/ld.so.conf'"
+ fi
+ echo
+
+ echo "See any operating system documentation about shared libraries for"
+ case $host in
+ solaris2.[6789]|solaris2.1[0-9])
+ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+ echo "pages."
+ ;;
+ *)
+ echo "more information, such as the ld(1) and ld.so(8) manual pages."
+ ;;
+ esac
+ echo "----------------------------------------------------------------------"
+ fi
+ exit $EXIT_SUCCESS
+}
+
+test finish = "$opt_mode" && func_mode_finish ${1+"$@"}
+
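+# Illustrative example (not part of the upstream script): after installing
+# libraries into a directory such as /usr/local/lib you would typically run
+#
+#   ./libtool --mode=finish /usr/local/lib
+#
+# which executes the platform's finish_cmds (often ldconfig on GNU/Linux)
+# and prints the runtime-path advice shown above.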
+
+# func_mode_install arg...
+func_mode_install ()
+{
+ $debug_cmd
+
+ # There may be an optional sh(1) argument at the beginning of
+ # install_prog (especially on Windows NT).
+ if test "$SHELL" = "$nonopt" || test /bin/sh = "$nonopt" ||
+ # Allow the use of GNU shtool's install command.
+ case $nonopt in *shtool*) :;; *) false;; esac
+ then
+ # Aesthetically quote it.
+ func_quote_arg pretty "$nonopt"
+ install_prog="$func_quote_arg_result "
+ arg=$1
+ shift
+ else
+ install_prog=
+ arg=$nonopt
+ fi
+
+ # The real first argument should be the name of the installation program.
+ # Aesthetically quote it.
+ func_quote_arg pretty "$arg"
+ func_append install_prog "$func_quote_arg_result"
+ install_shared_prog=$install_prog
+ case " $install_prog " in
+ *[\\\ /]cp\ *) install_cp=: ;;
+ *) install_cp=false ;;
+ esac
+
+ # We need to accept at least all the BSD install flags.
+ dest=
+ files=
+ opts=
+ prev=
+ install_type=
+ isdir=false
+ stripme=
+ no_mode=:
+ for arg
+ do
+ arg2=
+ if test -n "$dest"; then
+ func_append files " $dest"
+ dest=$arg
+ continue
+ fi
+
+ case $arg in
+ -d) isdir=: ;;
+ -f)
+ if $install_cp; then :; else
+ prev=$arg
+ fi
+ ;;
+ -g | -m | -o)
+ prev=$arg
+ ;;
+ -s)
+ stripme=" -s"
+ continue
+ ;;
+ -*)
+ ;;
+ *)
+ # If the previous option needed an argument, then skip it.
+ if test -n "$prev"; then
+ if test X-m = "X$prev" && test -n "$install_override_mode"; then
+ arg2=$install_override_mode
+ no_mode=false
+ fi
+ prev=
+ else
+ dest=$arg
+ continue
+ fi
+ ;;
+ esac
+
+ # Aesthetically quote the argument.
+ func_quote_arg pretty "$arg"
+ func_append install_prog " $func_quote_arg_result"
+ if test -n "$arg2"; then
+ func_quote_arg pretty "$arg2"
+ fi
+ func_append install_shared_prog " $func_quote_arg_result"
+ done
+
+ test -z "$install_prog" && \
+ func_fatal_help "you must specify an install program"
+
+ test -n "$prev" && \
+ func_fatal_help "the '$prev' option requires an argument"
+
+ if test -n "$install_override_mode" && $no_mode; then
+ if $install_cp; then :; else
+ func_quote_arg pretty "$install_override_mode"
+ func_append install_shared_prog " -m $func_quote_arg_result"
+ fi
+ fi
+
+ if test -z "$files"; then
+ if test -z "$dest"; then
+ func_fatal_help "no file or destination specified"
+ else
+ func_fatal_help "you must specify a destination"
+ fi
+ fi
+
+ # Strip any trailing slash from the destination.
+ func_stripname '' '/' "$dest"
+ dest=$func_stripname_result
+
+ # Check to see that the destination is a directory.
+ test -d "$dest" && isdir=:
+ if $isdir; then
+ destdir=$dest
+ destname=
+ else
+ func_dirname_and_basename "$dest" "" "."
+ destdir=$func_dirname_result
+ destname=$func_basename_result
+
+ # Not a directory, so check to see that there is only one file specified.
+ set dummy $files; shift
+ test "$#" -gt 1 && \
+ func_fatal_help "'$dest' is not a directory"
+ fi
+ case $destdir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ *)
+ for file in $files; do
+ case $file in
+ *.lo) ;;
+ *)
+ func_fatal_help "'$destdir' must be an absolute directory name"
+ ;;
+ esac
+ done
+ ;;
+ esac
+
+ # This variable tells wrapper scripts just to set variables rather
+ # than running their programs.
+ libtool_install_magic=$magic
+
+ staticlibs=
+ future_libdirs=
+ current_libdirs=
+ for file in $files; do
+
+ # Do each installation.
+ case $file in
+ *.$libext)
+ # Do the static libraries later.
+ func_append staticlibs " $file"
+ ;;
+
+ *.la)
+ func_resolve_sysroot "$file"
+ file=$func_resolve_sysroot_result
+
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$file" \
+ || func_fatal_help "'$file' is not a valid libtool archive"
+
+ library_names=
+ old_library=
+ relink_command=
+ func_source "$file"
+
+ # Add the libdir to current_libdirs if it is the destination.
+ if test "X$destdir" = "X$libdir"; then
+ case "$current_libdirs " in
+ *" $libdir "*) ;;
+ *) func_append current_libdirs " $libdir" ;;
+ esac
+ else
+ # Note the libdir as a future libdir.
+ case "$future_libdirs " in
+ *" $libdir "*) ;;
+ *) func_append future_libdirs " $libdir" ;;
+ esac
+ fi
+
+ func_dirname "$file" "/" ""
+ dir=$func_dirname_result
+ func_append dir "$objdir"
+
+ if test -n "$relink_command"; then
+ # Determine the prefix the user has applied to our future dir.
+ inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+ # Don't allow the user to place us outside of our expected
+ # location b/c this prevents finding dependent libraries that
+ # are installed to the same prefix.
+ # At present, this check doesn't affect windows .dll's that
+ # are installed into $libdir/../bin (currently, that works fine)
+ # but it's something to keep an eye on.
+ test "$inst_prefix_dir" = "$destdir" && \
+ func_fatal_error "error: cannot install '$file' to a directory not ending in $libdir"
+
+ if test -n "$inst_prefix_dir"; then
+ # Stick the inst_prefix_dir data into the link command.
+ relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+ else
+ relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+ fi
+
+ func_warning "relinking '$file'"
+ func_show_eval "$relink_command" \
+ 'func_fatal_error "error: relink '\''$file'\'' with the above command before installing it"'
+ fi
+
+ # See the names of the shared library.
+ set dummy $library_names; shift
+ if test -n "$1"; then
+ realname=$1
+ shift
+
+ srcname=$realname
+ test -n "$relink_command" && srcname=${realname}T
+
+ # Install the shared library and build the symlinks.
+ func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+ 'exit $?'
+ tstripme=$stripme
+ case $host_os in
+ cygwin* | mingw* | windows* | pw32* | cegcc*)
+ case $realname in
+ *.dll.a)
+ tstripme=
+ ;;
+ esac
+ ;;
+ os2*)
+ case $realname in
+ *_dll.a)
+ tstripme=
+ ;;
+ esac
+ ;;
+ esac
+ if test -n "$tstripme" && test -n "$striplib"; then
+ func_show_eval "$striplib $destdir/$realname" 'exit $?'
+ fi
+
+ if test "$#" -gt 0; then
+ # Delete the old symlinks, and create new ones.
+ # Try 'ln -sf' first, because the 'ln' binary might depend on
+ # the symlink we replace! Solaris /bin/ln does not understand -f,
+ # so we also need to try rm && ln -s.
+ for linkname
+ do
+ test "$linkname" != "$realname" \
+ && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+ done
+ fi
+
+ # Do each command in the postinstall commands.
+ lib=$destdir/$realname
+ func_execute_cmds "$postinstall_cmds" 'exit $?'
+ fi
+
+ # Install the pseudo-library for information purposes.
+ func_basename "$file"
+ name=$func_basename_result
+ instname=$dir/${name}i
+ func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+ # Maybe install the static library, too.
+ test -n "$old_library" && func_append staticlibs " $dir/$old_library"
+ ;;
+
+ *.lo)
+ # Install (i.e. copy) a libtool object.
+
+ # Figure out destination file name, if it wasn't already specified.
+ if test -n "$destname"; then
+ destfile=$destdir/$destname
+ else
+ func_basename "$file"
+ destfile=$func_basename_result
+ destfile=$destdir/$destfile
+ fi
+
+ # Deduce the name of the destination old-style object file.
+ case $destfile in
+ *.lo)
+ func_lo2o "$destfile"
+ staticdest=$func_lo2o_result
+ ;;
+ *.$objext)
+ staticdest=$destfile
+ destfile=
+ ;;
+ *)
+ func_fatal_help "cannot copy a libtool object to '$destfile'"
+ ;;
+ esac
+
+ # Install the libtool object if requested.
+ test -n "$destfile" && \
+ func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+ # Install the old object if enabled.
+ if test yes = "$build_old_libs"; then
+ # Deduce the name of the old-style object file.
+ func_lo2o "$file"
+ staticobj=$func_lo2o_result
+ func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+ fi
+ exit $EXIT_SUCCESS
+ ;;
+
+ *)
+ # Figure out destination file name, if it wasn't already specified.
+ if test -n "$destname"; then
+ destfile=$destdir/$destname
+ else
+ func_basename "$file"
+ destfile=$func_basename_result
+ destfile=$destdir/$destfile
+ fi
+
+ # If the file is missing, and there is a .exe on the end, strip it
+ # because it is most likely a libtool script we actually want to
+ # install
+ stripped_ext=
+ case $file in
+ *.exe)
+ if test ! -f "$file"; then
+ func_stripname '' '.exe' "$file"
+ file=$func_stripname_result
+ stripped_ext=.exe
+ fi
+ ;;
+ esac
+
+ # Do a test to see if this is really a libtool program.
+ case $host in
+ *cygwin* | *mingw* | *windows*)
+ if func_ltwrapper_executable_p "$file"; then
+ func_ltwrapper_scriptname "$file"
+ wrapper=$func_ltwrapper_scriptname_result
+ else
+ func_stripname '' '.exe' "$file"
+ wrapper=$func_stripname_result
+ fi
+ ;;
+ *)
+ wrapper=$file
+ ;;
+ esac
+ if func_ltwrapper_script_p "$wrapper"; then
+ notinst_deplibs=
+ relink_command=
+
+ func_source "$wrapper"
+
+ # Check the variables that should have been set.
+ test -z "$generated_by_libtool_version" && \
+ func_fatal_error "invalid libtool wrapper script '$wrapper'"
+
+ finalize=:
+ for lib in $notinst_deplibs; do
+ # Check to see that each library is installed.
+ libdir=
+ if test -f "$lib"; then
+ func_source "$lib"
+ fi
+ libfile=$libdir/`$ECHO "$lib" | $SED 's%^.*/%%g'`
+ if test -n "$libdir" && test ! -f "$libfile"; then
+ func_warning "'$lib' has not been installed in '$libdir'"
+ finalize=false
+ fi
+ done
+
+ relink_command=
+ func_source "$wrapper"
+
+ outputname=
+ if test no = "$fast_install" && test -n "$relink_command"; then
+ $opt_dry_run || {
+ if $finalize; then
+ tmpdir=`func_mktempdir`
+ func_basename "$file$stripped_ext"
+ file=$func_basename_result
+ outputname=$tmpdir/$file
+ # Replace the output file specification.
+ relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+ $opt_quiet || {
+ func_quote_arg expand,pretty "$relink_command"
+ eval "func_echo $func_quote_arg_result"
+ }
+ if eval "$relink_command"; then :
+ else
+ func_error "error: relink '$file' with the above command before installing it"
+ $opt_dry_run || ${RM}r "$tmpdir"
+ continue
+ fi
+ file=$outputname
+ else
+ func_warning "cannot relink '$file'"
+ fi
+ }
+ else
+ # Install the binary that we compiled earlier.
+ file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+ fi
+ fi
+
+ # remove .exe since cygwin /usr/bin/install will append another
+ # one anyway
+ case $install_prog,$host in
+ */usr/bin/install*,*cygwin*)
+ case $file:$destfile in
+ *.exe:*.exe)
+ # this is ok
+ ;;
+ *.exe:*)
+ destfile=$destfile.exe
+ ;;
+ *:*.exe)
+ func_stripname '' '.exe' "$destfile"
+ destfile=$func_stripname_result
+ ;;
+ esac
+ ;;
+ esac
+ func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+ $opt_dry_run || if test -n "$outputname"; then
+ ${RM}r "$tmpdir"
+ fi
+ ;;
+ esac
+ done
+
+ for file in $staticlibs; do
+ func_basename "$file"
+ name=$func_basename_result
+
+ # Set up the ranlib parameters.
+ oldlib=$destdir/$name
+ func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+ tool_oldlib=$func_to_tool_file_result
+
+ func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+ if test -n "$stripme" && test -n "$old_striplib"; then
+ func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
+ fi
+
+ # Do each command in the postinstall commands.
+ func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+ done
+
+ test -n "$future_libdirs" && \
+ func_warning "remember to run '$progname --finish$future_libdirs'"
+
+ if test -n "$current_libdirs"; then
+ # Maybe just do a dry run.
+ $opt_dry_run && current_libdirs=" -n$current_libdirs"
+ exec_cmd='$SHELL "$progpath" $preserve_args --finish$current_libdirs'
+ else
+ exit $EXIT_SUCCESS
+ fi
+}
+
+test install = "$opt_mode" && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+ $debug_cmd
+
+ my_outputname=$1
+ my_originator=$2
+ my_pic_p=${3-false}
+ my_prefix=`$ECHO "$my_originator" | $SED 's%[^a-zA-Z0-9]%_%g'`
+ my_dlsyms=
+
+ if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
+ if test -n "$NM" && test -n "$global_symbol_pipe"; then
+ my_dlsyms=${my_outputname}S.c
+ else
+ func_error "not configured to extract global symbols from dlpreopened files"
+ fi
+ fi
+
+ if test -n "$my_dlsyms"; then
+ case $my_dlsyms in
+ "") ;;
+ *.c)
+ # Discover the nlist of each of the dlfiles.
+ nlist=$output_objdir/$my_outputname.nm
+
+ func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+ # Parse the name list into a source file.
+ func_verbose "creating $output_objdir/$my_dlsyms"
+
+ $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for '$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined __GNUC__ && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
+#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+ relocations are performed -- see ld's documentation on pseudo-relocs. */
+# define LT_DLSYM_CONST
+#elif defined __osf__
+/* This system does not cope well with relocations in const data. */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0)
+
+/* External symbol declarations for the compiler. */\
+"
+
+ if test yes = "$dlself"; then
+ func_verbose "generating symbol list for '$output'"
+
+ $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+ # Add our own program objects to the symbol list.
+ progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ for progfile in $progfiles; do
+ func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+ func_verbose "extracting global C symbols from '$func_to_tool_file_result'"
+ $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+ done
+
+ if test -n "$exclude_expsyms"; then
+ $opt_dry_run || {
+ eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+ eval '$MV "$nlist"T "$nlist"'
+ }
+ fi
+
+ if test -n "$export_symbols_regex"; then
+ $opt_dry_run || {
+ eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+ eval '$MV "$nlist"T "$nlist"'
+ }
+ fi
+
+ # Prepare the list of exported symbols
+ if test -z "$export_symbols"; then
+ export_symbols=$output_objdir/$outputname.exp
+ $opt_dry_run || {
+ $RM $export_symbols
+ eval "$SED -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+ case $host in
+ *cygwin* | *mingw* | *windows* | *cegcc* )
+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+ eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+ ;;
+ esac
+ }
+ else
+ $opt_dry_run || {
+ eval "$SED -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+ eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+ eval '$MV "$nlist"T "$nlist"'
+ case $host in
+ *cygwin* | *mingw* | *windows* | *cegcc* )
+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+ eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+ ;;
+ esac
+ }
+ fi
+ fi
+
+ for dlprefile in $dlprefiles; do
+ func_verbose "extracting global C symbols from '$dlprefile'"
+ func_basename "$dlprefile"
+ name=$func_basename_result
+ case $host in
+ *cygwin* | *mingw* | *windows* | *cegcc* )
+ # if an import library, we need to obtain dlname
+ if func_win32_import_lib_p "$dlprefile"; then
+ func_tr_sh "$dlprefile"
+ eval "curr_lafile=\$libfile_$func_tr_sh_result"
+ dlprefile_dlbasename=
+ if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+ # Use subshell, to avoid clobbering current variable values
+ dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+ if test -n "$dlprefile_dlname"; then
+ func_basename "$dlprefile_dlname"
+ dlprefile_dlbasename=$func_basename_result
+ else
+ # no lafile. user explicitly requested -dlpreopen <import library>.
+ $sharedlib_from_linklib_cmd "$dlprefile"
+ dlprefile_dlbasename=$sharedlib_from_linklib_result
+ fi
+ fi
+ $opt_dry_run || {
+ if test -n "$dlprefile_dlbasename"; then
+ eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+ else
+ func_warning "Could not compute DLL name from $name"
+ eval '$ECHO ": $name " >> "$nlist"'
+ fi
+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+ $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+ }
+ else # not an import lib
+ $opt_dry_run || {
+ eval '$ECHO ": $name " >> "$nlist"'
+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+ }
+ fi
+ ;;
+ *)
+ $opt_dry_run || {
+ eval '$ECHO ": $name " >> "$nlist"'
+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+ }
+ ;;
+ esac
+ done
+
+ $opt_dry_run || {
+ # Make sure we have at least an empty file.
+ test -f "$nlist" || : > "$nlist"
+
+ if test -n "$exclude_expsyms"; then
+ $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+ $MV "$nlist"T "$nlist"
+ fi
+
+ # Try sorting and uniquifying the output.
+ if $GREP -v "^: " < "$nlist" |
+ if sort -k 3 </dev/null >/dev/null 2>&1; then
+ sort -k 3
+ else
+ sort +2
+ fi |
+ uniq > "$nlist"S; then
+ :
+ else
+ $GREP -v "^: " < "$nlist" > "$nlist"S
+ fi
+
+ if test -f "$nlist"S; then
+ eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+ else
+ echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+ fi
+
+ func_show_eval '$RM "${nlist}I"'
+ if test -n "$global_symbol_to_import"; then
+ eval "$global_symbol_to_import"' < "$nlist"S > "$nlist"I'
+ fi
+
+ echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols. */
+typedef struct {
+ const char *name;
+ void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];\
+"
+
+ if test -s "$nlist"I; then
+ echo >> "$output_objdir/$my_dlsyms" "\
+static void lt_syminit(void)
+{
+ LT_DLSYM_CONST lt_dlsymlist *symbol = lt_${my_prefix}_LTX_preloaded_symbols;
+ for (; symbol->name; ++symbol)
+ {"
+ $SED 's/.*/ if (STREQ (symbol->name, \"&\")) symbol->address = (void *) \&&;/' < "$nlist"I >> "$output_objdir/$my_dlsyms"
+ echo >> "$output_objdir/$my_dlsyms" "\
+ }
+}"
+ fi
+ echo >> "$output_objdir/$my_dlsyms" "\
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{ {\"$my_originator\", (void *) 0},"
+
+ if test -s "$nlist"I; then
+ echo >> "$output_objdir/$my_dlsyms" "\
+ {\"@INIT@\", (void *) &lt_syminit},"
+ fi
+
+ case $need_lib_prefix in
+ no)
+ eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+ ;;
+ *)
+ eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+ ;;
+ esac
+ echo >> "$output_objdir/$my_dlsyms" "\
+ {0, (void *) 0}
+};
+
+/* This works around a problem in FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+ return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+ } # !$opt_dry_run
+
+ pic_flag_for_symtable=
+ case "$compile_command " in
+ *" -static "*) ;;
+ *)
+ case $host in
+ # compiling the symbol table file with pic_flag works around
+ # a FreeBSD bug that causes programs to crash when -lm is
+ # linked before any other PIC object. But we must not use
+ # pic_flag when linking with -static. The problem exists in
+ # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+ *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+ pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+ *-*-hpux*)
+ pic_flag_for_symtable=" $pic_flag" ;;
+ *)
+ $my_pic_p && pic_flag_for_symtable=" $pic_flag"
+ ;;
+ esac
+ ;;
+ esac
+ symtab_cflags=
+ for arg in $LTCFLAGS; do
+ case $arg in
+ -pie | -fpie | -fPIE) ;;
+ *) func_append symtab_cflags " $arg" ;;
+ esac
+ done
+
+ # Now compile the dynamic symbol file.
+ func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+ # Clean up the generated files.
+ func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T" "${nlist}I"'
+
+ # Transform the symbol file into the correct name.
+ symfileobj=$output_objdir/${my_outputname}S.$objext
+ case $host in
+ *cygwin* | *mingw* | *windows* | *cegcc* )
+ if test -f "$output_objdir/$my_outputname.def"; then
+ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+ else
+ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ fi
+ ;;
+ *)
+ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ ;;
+ esac
+ ;;
+ *)
+ func_fatal_error "unknown suffix for '$my_dlsyms'"
+ ;;
+ esac
+ else
+ # We keep going just in case the user didn't refer to
+ # lt_preloaded_symbols. The linker will fail if global_symbol_pipe
+ # really was required.
+
+ # Nullify the symbol file.
+ compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+ fi
+}
+
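+# Illustrative sketch (not upstream libtool code): the core of the dlsym
+# emulation above is "$NM ... | $global_symbol_pipe", which reduces a symbol
+# table to "type name" pairs. A rough, platform-naive approximation for a
+# hypothetical object file would be:
+#
+#   nm -P locusts.o | awk '$2 ~ /^[TDB]$/ { print $2, $1 }'
+#   # e.g. -> T locusts_swarm
+#
+# The real global_symbol_pipe is configured per host by libtool.m4 and also
+# copes with symbol-prefix underscores, import symbols, and the like.
+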
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+ $debug_cmd
+
+ func_to_tool_file "$1" func_convert_file_msys_to_w32
+ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+ test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+ $debug_cmd
+
+ func_to_tool_file "$1" func_convert_file_msys_to_w32
+ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+ test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, it also deals with 64-bit binaries.
+func_win32_libid ()
+{
+ $debug_cmd
+
+ win32_libid_type=unknown
+ win32_fileres=`file -L $1 2>/dev/null`
+ case $win32_fileres in
+ *ar\ archive\ import\ library*) # definitely import
+ win32_libid_type="x86 archive import"
+ ;;
+ *ar\ archive*) # could be an import, or static
+ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+ if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+ $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+ case $nm_interface in
+ "MS dumpbin")
+ if func_cygming_ms_implib_p "$1" ||
+ func_cygming_gnu_implib_p "$1"
+ then
+ win32_nmres=import
+ else
+ win32_nmres=
+ fi
+ ;;
+ *)
+ func_to_tool_file "$1" func_convert_file_msys_to_w32
+ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+ $SED -n -e '
+ 1,100{
+ / I /{
+ s|.*|import|
+ p
+ q
+ }
+ }'`
+ ;;
+ esac
+ case $win32_nmres in
+ import*) win32_libid_type="x86 archive import";;
+ *) win32_libid_type="x86 archive static";;
+ esac
+ fi
+ ;;
+ *DLL*)
+ win32_libid_type="x86 DLL"
+ ;;
+ *executable*) # but shell scripts are "executable" too...
+ case $win32_fileres in
+ *MS\ Windows\ PE\ Intel*)
+ win32_libid_type="x86 DLL"
+ ;;
+ esac
+ ;;
+ esac
+ $ECHO "$win32_libid_type"
+}
+
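+# Illustrative note (not part of the upstream script): the classification
+# above keys off the textual output of file(1). Typical (version-dependent)
+# outputs map roughly as follows:
+#
+#   "current ar archive"                      -> x86 archive static/import
+#   "PE32+ executable (DLL) (console) x86-64" -> x86 DLL
+#
+# with the ambiguous archive case resolved via $OBJDUMP/$NM as shown above.
+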
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+# $sharedlib_from_linklib_cmd
+# Result is available in the variable
+# $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+ $debug_cmd
+
+ sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+ $debug_cmd
+
+ match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+ $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+ $SED '/^Contents of section '"$match_literal"':/{
+ # Place marker at beginning of archive member dllname section
+ s/.*/====MARK====/
+ p
+ d
+ }
+ # These lines can sometimes be longer than 43 characters, but
+ # are always uninteresting
+ /:[ ]*file format pe[i]\{,1\}-/d
+ /^In archive [^:]*:/d
+ # Ensure marker is printed
+ /^====MARK====/p
+ # Remove all lines with less than 43 characters
+ /^.\{43\}/!d
+ # From remaining lines, remove first 43 characters
+ s/^.\{43\}//' |
+ $SED -n '
+ # Join marker and all lines until next marker into a single line
+ /^====MARK====/ b para
+ H
+ $ b para
+ b
+ :para
+ x
+ s/\n//g
+ # Remove the marker
+ s/^====MARK====//
+ # Remove trailing dots and whitespace
+ s/[\. \t]*$//
+ # Print
+ /./p' |
+ # we now have a list, one entry per line, of the stringified
+ # contents of the appropriate section of all members of the
+ # archive that possess that section. Heuristic: eliminate
+ # all those that have a first or second character that is
+ # a '.' (that is, objdump's representation of an unprintable
+ # character.) This should work for all archives with less than
+ # 0x302f exports -- but will fail for DLLs whose name actually
+ # begins with a literal '.' or a single character followed by
+ # a '.'.
+ #
+ # Of those that remain, print the first one.
+ $SED -e '/^\./d;/^.\./d;q'
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+# $sharedlib_from_linklib_cmd
+# Result is available in the variable
+# $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+ $debug_cmd
+
+ if func_cygming_gnu_implib_p "$1"; then
+ # binutils import library
+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+ elif func_cygming_ms_implib_p "$1"; then
+ # ms-generated import library
+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+ else
+ # unknown
+ sharedlib_from_linklib_result=
+ fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+ $debug_cmd
+
+ f_ex_an_ar_dir=$1; shift
+ f_ex_an_ar_oldlib=$1
+ if test yes = "$lock_old_archive_extraction"; then
+ lockfile=$f_ex_an_ar_oldlib.lock
+ until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+ func_echo "Waiting for $lockfile to be removed"
+ sleep 2
+ done
+ fi
+ func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+ 'stat=$?; rm -f "$lockfile"; exit $stat'
+ if test yes = "$lock_old_archive_extraction"; then
+ $opt_dry_run || rm -f "$lockfile"
+ fi
+ if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+ :
+ else
+ func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+ fi
+}
+
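+# Illustrative sketch (not upstream libtool code): the duplicate-member check
+# above works because 'sort -uc' exits non-zero when its already-sorted input
+# contains repeated lines. With a hypothetical archive:
+#
+#   ar t libswarm.a | sort | sort -uc >/dev/null 2>&1 \
+#     && echo "member names are unique" \
+#     || echo "duplicate member names: 'ar x' would overwrite objects"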
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+ $debug_cmd
+
+ my_gentop=$1; shift
+ my_oldlibs=${1+"$@"}
+ my_oldobjs=
+ my_xlib=
+ my_xabs=
+ my_xdir=
+
+ for my_xlib in $my_oldlibs; do
+ # Extract the objects.
+ case $my_xlib in
+ [\\/]* | [A-Za-z]:[\\/]*) my_xabs=$my_xlib ;;
+ *) my_xabs=`pwd`"/$my_xlib" ;;
+ esac
+ func_basename "$my_xlib"
+ my_xlib=$func_basename_result
+ my_xlib_u=$my_xlib
+ while :; do
+ case " $extracted_archives " in
+ *" $my_xlib_u "*)
+ func_arith $extracted_serial + 1
+ extracted_serial=$func_arith_result
+ my_xlib_u=lt$extracted_serial-$my_xlib ;;
+ *) break ;;
+ esac
+ done
+ extracted_archives="$extracted_archives $my_xlib_u"
+ my_xdir=$my_gentop/$my_xlib_u
+
+ func_mkdir_p "$my_xdir"
+
+ case $host in
+ *-darwin*)
+ func_verbose "Extracting $my_xabs"
+ # Do not bother doing anything if just a dry run
+ $opt_dry_run || {
+ darwin_orig_dir=`pwd`
+ cd $my_xdir || exit $?
+ darwin_archive=$my_xabs
+ darwin_curdir=`pwd`
+ func_basename "$darwin_archive"
+ darwin_base_archive=$func_basename_result
+ darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+ if test -n "$darwin_arches"; then
+ darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+ darwin_arch=
+ func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+ for darwin_arch in $darwin_arches; do
+ func_mkdir_p "unfat-$$/$darwin_base_archive-$darwin_arch"
+ $LIPO -thin $darwin_arch -output "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" "$darwin_archive"
+ cd "unfat-$$/$darwin_base_archive-$darwin_arch"
+ func_extract_an_archive "`pwd`" "$darwin_base_archive"
+ cd "$darwin_curdir"
+ $RM "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive"
+ done # $darwin_arches
+ ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+ darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$sed_basename" | sort -u`
+ darwin_file=
+ darwin_files=
+ for darwin_file in $darwin_filelist; do
+ darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+ $LIPO -create -output "$darwin_file" $darwin_files
+ done # $darwin_filelist
+ $RM -rf unfat-$$
+ cd "$darwin_orig_dir"
+ else
+ cd $darwin_orig_dir
+ func_extract_an_archive "$my_xdir" "$my_xabs"
+ fi # $darwin_arches
+ } # !$opt_dry_run
+ ;;
+ *)
+ func_extract_an_archive "$my_xdir" "$my_xabs"
+ ;;
+ esac
+ my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+ done
+
+ func_extract_archives_result=$my_oldobjs
+}
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw/windows
+# wrapper executable. Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take. If 'yes', then the emitted script
+# will assume that the directory where it is stored is
+# the $objdir directory. This is a cygwin/mingw/windows-specific
+# behavior.
+func_emit_wrapper ()
+{
+ func_emit_wrapper_arg1=${1-no}
+
+ $ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting. It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+ emulate sh
+ NULLCMD=:
+ # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '\${1+\"\$@\"}'='\"\$@\"'
+ setopt NO_GLOB_SUBST
+else
+ case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+ # install mode needs the following variables:
+ generated_by_libtool_version='$macro_version'
+ notinst_deplibs='$notinst_deplibs'
+else
+ # When we are sourced in execute mode, \$file and \$ECHO are already set.
+ if test \"\$libtool_execute_magic\" != \"$magic\"; then
+ file=\"\$0\""
+
+ func_quote_arg pretty "$ECHO"
+ qECHO=$func_quote_arg_result
+ $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+ eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+ ECHO=$qECHO
+ fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) are identical between the wrapper
+# /script/ and the wrapper /executable/ that is used only on
+# windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options that match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+ lt_script_arg0=\$0
+ shift
+ for lt_opt
+ do
+ case \"\$lt_opt\" in
+ --lt-debug) lt_option_debug=1 ;;
+ --lt-dump-script)
+ lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+ test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+ lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+ cat \"\$lt_dump_D/\$lt_dump_F\"
+ exit 0
+ ;;
+ --lt-*)
+ \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+ exit 1
+ ;;
+ esac
+ done
+
+ # Print the debug banner immediately:
+ if test -n \"\$lt_option_debug\"; then
+ echo \"$outputname:$output:\$LINENO: libtool wrapper (GNU $PACKAGE) $VERSION\" 1>&2
+ fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+ lt_dump_args_N=1;
+ for lt_arg
+ do
+ \$ECHO \"$outputname:$output:\$LINENO: newargv[\$lt_dump_args_N]: \$lt_arg\"
+ lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+ done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+ case $host in
+ # Backslashes separate directories on plain windows
+ *-*-mingw* | *-*-windows* | *-*-os2* | *-cegcc*)
+ $ECHO "\
+ if test -n \"\$lt_option_debug\"; then
+ \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir\\\\\$program\" 1>&2
+ func_lt_dump_args \${1+\"\$@\"} 1>&2
+ fi
+ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+ ;;
+
+ *)
+ $ECHO "\
+ if test -n \"\$lt_option_debug\"; then
+ \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir/\$program\" 1>&2
+ func_lt_dump_args \${1+\"\$@\"} 1>&2
+ fi
+ exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+ ;;
+ esac
+ $ECHO "\
+ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+ exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+ case \" \$* \" in
+ *\\ --lt-*)
+ for lt_wr_arg
+ do
+ case \$lt_wr_arg in
+ --lt-*) ;;
+ *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+ esac
+ shift
+ done ;;
+ esac
+ func_exec_program_core \${1+\"\$@\"}
+}
+
+ # Parse options
+ func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+ # Find the directory that this script lives in.
+ thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+ test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+ # Follow symbolic links until we get to the real thisdir.
+ file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+ while test -n \"\$file\"; do
+ destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+ # If there was a directory component, then change thisdir.
+ if test \"x\$destdir\" != \"x\$file\"; then
+ case \"\$destdir\" in
+ [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+ *) thisdir=\"\$thisdir/\$destdir\" ;;
+ esac
+ fi
+
+ file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+ file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+ done
+
+ # Usually 'no', except on cygwin/mingw/windows when embedded into
+ # the cwrapper.
+ WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+ if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+ # special case for '.'
+ if test \"\$thisdir\" = \".\"; then
+ thisdir=\`pwd\`
+ fi
+ # remove .libs from thisdir
+ case \"\$thisdir\" in
+ *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+ $objdir ) thisdir=. ;;
+ esac
+ fi
+
+ # Try to get the absolute directory name.
+ absdir=\`cd \"\$thisdir\" && pwd\`
+ test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+ if test yes = "$fast_install"; then
+ $ECHO "\
+ program=lt-'$outputname'$exeext
+ progdir=\"\$thisdir/$objdir\"
+
+ if test ! -f \"\$progdir/\$program\" ||
+ { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | $SED 1q\`; \\
+ test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+ file=\"\$\$-\$program\"
+
+ if test ! -d \"\$progdir\"; then
+ $MKDIR \"\$progdir\"
+ else
+ $RM \"\$progdir/\$file\"
+ fi"
+
+ $ECHO "\
+
+ # relink executable if necessary
+ if test -n \"\$relink_command\"; then
+ if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+ else
+ \$ECHO \"\$relink_command_output\" >&2
+ $RM \"\$progdir/\$file\"
+ exit 1
+ fi
+ fi
+
+ $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+ { $RM \"\$progdir/\$program\";
+ $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+ $RM \"\$progdir/\$file\"
+ fi"
+ else
+ $ECHO "\
+ program='$outputname'
+ progdir=\"\$thisdir/$objdir\"
+"
+ fi
+
+ $ECHO "\
+
+ if test -f \"\$progdir/\$program\"; then"
+
+ # fixup the dll searchpath if we need to.
+ #
+ # Fix the DLL searchpath if we need to. Do this before prepending
+ # to shlibpath, because on Windows, both are PATH and uninstalled
+ # libraries must come first.
+ if test -n "$dllsearchpath"; then
+ $ECHO "\
+ # Add the dll search path components to the executable PATH
+ PATH=$dllsearchpath:\$PATH
+"
+ fi
+
+ # Export our shlibpath_var if we have one.
+ if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+ $ECHO "\
+ # Add our own library path to $shlibpath_var
+ $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+ # Some systems cannot cope with colon-terminated $shlibpath_var
+ # The second colon is a workaround for a bug in BeOS R4 sed
+ $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+ export $shlibpath_var
+"
+ fi
+
+ $ECHO "\
+ if test \"\$libtool_execute_magic\" != \"$magic\"; then
+ # Run the actual program with our arguments.
+ func_exec_program \${1+\"\$@\"}
+ fi
+ else
+ # The program doesn't exist.
+ \$ECHO \"\$0: error: '\$progdir/\$program' does not exist\" 1>&2
+ \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+ \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+ exit 1
+ fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+ cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+ Generated by $PROGRAM (GNU $PACKAGE) $VERSION
+
+ The $output program cannot be directly executed until all the libtool
+ libraries that it depends on are installed.
+
+ This wrapper executable should never be moved out of the build directory.
+ If it is, it will not operate correctly.
+*/
+EOF
+ cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#if defined (_WIN32) && !defined (__GNUC__)
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+# include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0)
+
+/* declarations of non-ANSI functions */
+#if defined __MINGW32__
+# ifdef __STRICT_ANSI__
+_CRTIMP int __cdecl _putenv (const char *);
+# endif
+#elif defined __CYGWIN__
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined other_platform || defined ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined _MSC_VER
+# define setmode _setmode
+# define stat _stat
+# define chmod _chmod
+# define getcwd _getcwd
+# define putenv _putenv
+# define S_IXUSR _S_IEXEC
+#elif defined __MINGW32__
+# define setmode _setmode
+# define stat _stat
+# define chmod _chmod
+# define getcwd _getcwd
+# define putenv _putenv
+#elif defined __CYGWIN__
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined other platforms ... */
+#endif
+
+#if defined PATH_MAX
+# define LT_PATHMAX PATH_MAX
+#elif defined MAXPATHLEN
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined _WIN32 || defined __MSDOS__ || defined __DJGPP__ || \
+ defined __OS2__
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+# define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+# define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+ (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+ if (stale) { free (stale); stale = 0; } \
+} while (0)
+
+#if defined LT_DEBUGWRAPPER
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+ cat <<EOF
+#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 5)
+# define externally_visible volatile
+#else
+# define externally_visible __attribute__((externally_visible)) volatile
+#endif
+externally_visible const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+ if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+ func_to_host_path "$temp_rpath"
+ cat <<EOF
+const char * LIB_PATH_VALUE = "$func_to_host_path_result";
+EOF
+ else
+ cat <<"EOF"
+const char * LIB_PATH_VALUE = "";
+EOF
+ fi
+
+ if test -n "$dllsearchpath"; then
+ func_to_host_path "$dllsearchpath:"
+ cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE = "$func_to_host_path_result";
+EOF
+ else
+ cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE = "";
+EOF
+ fi
+
+ if test yes = "$fast_install"; then
+ cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+ else
+ cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+ fi
+
+
+ cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+ char **newargz;
+ int newargc;
+ char *tmp_pathspec;
+ char *actual_cwrapper_path;
+ char *actual_cwrapper_name;
+ char *target_name;
+ char *lt_argv_zero;
+ int rval = 127;
+
+ int i;
+
+ program_name = (char *) xstrdup (base_name (argv[0]));
+ newargz = XMALLOC (char *, (size_t) argc + 1);
+
+ /* very simple arg parsing; don't want to rely on getopt
+ * also, copy all non cwrapper options to newargz, except
+ * argz[0], which is handled differently
+ */
+ newargc=0;
+ for (i = 1; i < argc; i++)
+ {
+ if (STREQ (argv[i], dumpscript_opt))
+ {
+EOF
+ case $host in
+ *mingw* | *windows* | *cygwin* )
+ # make stdout use "unix" line endings
+ echo " setmode(1,_O_BINARY);"
+ ;;
+ esac
+
+ cat <<"EOF"
+ lt_dump_script (stdout);
+ return 0;
+ }
+ if (STREQ (argv[i], debug_opt))
+ {
+ lt_debug = 1;
+ continue;
+ }
+ if (STREQ (argv[i], ltwrapper_option_prefix))
+ {
+ /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+ namespace, but it is not one of the ones we know about and
+ have already dealt with, above (including dump-script), then
+ report an error. Otherwise, targets might begin to believe
+ they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+ namespace. The first time any user complains about this, we'll
+ need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+ or a configure.ac-settable value.
+ */
+ lt_fatal (__FILE__, __LINE__,
+ "unrecognized %s option: '%s'",
+ ltwrapper_option_prefix, argv[i]);
+ }
+ /* otherwise ... */
+ newargz[++newargc] = xstrdup (argv[i]);
+ }
+ newargz[++newargc] = NULL;
+
+EOF
+ cat <<EOF
+ /* The GNU banner must be the first non-error debug message */
+ lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE) $VERSION\n");
+EOF
+ cat <<"EOF"
+ lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+ lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+ tmp_pathspec = find_executable (argv[0]);
+ if (tmp_pathspec == NULL)
+ lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) found exe (before symlink chase) at: %s\n",
+ tmp_pathspec);
+
+ actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) found exe (after symlink chase) at: %s\n",
+ actual_cwrapper_path);
+ XFREE (tmp_pathspec);
+
+ actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+ strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+ /* wrapper name transforms */
+ strendzap (actual_cwrapper_name, ".exe");
+ tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+ XFREE (actual_cwrapper_name);
+ actual_cwrapper_name = tmp_pathspec;
+ tmp_pathspec = 0;
+
+ /* target_name transforms -- use actual target program name; might have lt- prefix */
+ target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+ strendzap (target_name, ".exe");
+ tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+ XFREE (target_name);
+ target_name = tmp_pathspec;
+ tmp_pathspec = 0;
+
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) libtool target name: %s\n",
+ target_name);
+EOF
+
+ cat <<EOF
+ newargz[0] =
+ XMALLOC (char, (strlen (actual_cwrapper_path) +
+ strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+ strcpy (newargz[0], actual_cwrapper_path);
+ strcat (newargz[0], "$objdir");
+ strcat (newargz[0], "/");
+EOF
+
+ cat <<"EOF"
+ /* stop here, and copy so we don't have to do this twice */
+ tmp_pathspec = xstrdup (newargz[0]);
+
+ /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+ strcat (newargz[0], actual_cwrapper_name);
+
+ /* DO want the lt- prefix here if it exists, so use target_name */
+ lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+ XFREE (tmp_pathspec);
+ tmp_pathspec = NULL;
+EOF
+
+ case $host_os in
+ mingw* | windows*)
+ cat <<"EOF"
+ {
+ char* p;
+ while ((p = strchr (newargz[0], '\\')) != NULL)
+ {
+ *p = '/';
+ }
+ while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+ {
+ *p = '/';
+ }
+ }
+EOF
+ ;;
+ esac
+
+ cat <<"EOF"
+ XFREE (target_name);
+ XFREE (actual_cwrapper_path);
+ XFREE (actual_cwrapper_name);
+
+ lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+ lt_setenv ("DUALCASE", "1"); /* for MSK sh */
+ /* Update the DLL searchpath. EXE_PATH_VALUE ($dllsearchpath) must
+ be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
+ because on Windows, both *_VARNAMEs are PATH but uninstalled
+ libraries must come first. */
+ lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+ lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+ lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+ nonnull (lt_argv_zero));
+ for (i = 0; i < newargc; i++)
+ {
+ lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+ i, nonnull (newargz[i]));
+ }
+
+EOF
+
+ case $host_os in
+ mingw* | windows*)
+ cat <<"EOF"
+ /* execv doesn't actually work on mingw as expected on unix */
+ newargz = prepare_spawn (newargz);
+ rval = (int) _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+ if (rval == -1)
+ {
+ /* failed to start process */
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) failed to launch target \"%s\": %s\n",
+ lt_argv_zero, nonnull (strerror (errno)));
+ return 127;
+ }
+ return rval;
+EOF
+ ;;
+ *)
+ cat <<"EOF"
+ execv (lt_argv_zero, newargz);
+ return rval; /* =127, but avoids unused variable warning */
+EOF
+ ;;
+ esac
+
+ cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+ void *p = (void *) malloc (num);
+ if (!p)
+ lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+ return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+ return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+ string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+ const char *base;
+
+#if defined HAVE_DOS_BASED_FILE_SYSTEM
+ /* Skip over the disk name in MSDOS pathnames. */
+ if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+ name += 2;
+#endif
+
+ for (base = name; *name; name++)
+ if (IS_DIR_SEPARATOR (*name))
+ base = name + 1;
+ return base;
+}
+
+int
+check_executable (const char *path)
+{
+ struct stat st;
+
+ lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+ nonempty (path));
+ if ((!path) || (!*path))
+ return 0;
+
+ if ((stat (path, &st) >= 0)
+ && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+ return 1;
+ else
+ return 0;
+}
+
+int
+make_executable (const char *path)
+{
+ int rval = 0;
+ struct stat st;
+
+ lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+ nonempty (path));
+ if ((!path) || (!*path))
+ return 0;
+
+ if (stat (path, &st) >= 0)
+ {
+ rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+ }
+ return rval;
+}
+
+/* Searches for the full path of the wrapper. Returns
+ newly allocated full path name if found, NULL otherwise
+ Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+ int has_slash = 0;
+ const char *p;
+ const char *p_next;
+ /* static buffer for getcwd */
+ char tmp[LT_PATHMAX + 1];
+ size_t tmp_len;
+ char *concat_name;
+
+ lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+ nonempty (wrapper));
+
+ if ((wrapper == NULL) || (*wrapper == '\0'))
+ return NULL;
+
+ /* Absolute path? */
+#if defined HAVE_DOS_BASED_FILE_SYSTEM
+ if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+ {
+ concat_name = xstrdup (wrapper);
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ }
+ else
+ {
+#endif
+ if (IS_DIR_SEPARATOR (wrapper[0]))
+ {
+ concat_name = xstrdup (wrapper);
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ }
+#if defined HAVE_DOS_BASED_FILE_SYSTEM
+ }
+#endif
+
+ for (p = wrapper; *p; p++)
+ if (*p == '/')
+ {
+ has_slash = 1;
+ break;
+ }
+ if (!has_slash)
+ {
+ /* no slashes; search PATH */
+ const char *path = getenv ("PATH");
+ if (path != NULL)
+ {
+ for (p = path; *p; p = p_next)
+ {
+ const char *q;
+ size_t p_len;
+ for (q = p; *q; q++)
+ if (IS_PATH_SEPARATOR (*q))
+ break;
+ p_len = (size_t) (q - p);
+ p_next = (*q == '\0' ? q : q + 1);
+ if (p_len == 0)
+ {
+ /* empty path: current directory */
+ if (getcwd (tmp, LT_PATHMAX) == NULL)
+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+ nonnull (strerror (errno)));
+ tmp_len = strlen (tmp);
+ concat_name =
+ XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, tmp, tmp_len);
+ concat_name[tmp_len] = '/';
+ strcpy (concat_name + tmp_len + 1, wrapper);
+ }
+ else
+ {
+ concat_name =
+ XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, p, p_len);
+ concat_name[p_len] = '/';
+ strcpy (concat_name + p_len + 1, wrapper);
+ }
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ }
+ }
+ /* not found in PATH; assume curdir */
+ }
+ /* Relative path | not found in path: prepend cwd */
+ if (getcwd (tmp, LT_PATHMAX) == NULL)
+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+ nonnull (strerror (errno)));
+ tmp_len = strlen (tmp);
+ concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, tmp, tmp_len);
+ concat_name[tmp_len] = '/';
+ strcpy (concat_name + tmp_len + 1, wrapper);
+
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+ return xstrdup (pathspec);
+#else
+ char buf[LT_PATHMAX];
+ struct stat s;
+ char *tmp_pathspec = xstrdup (pathspec);
+ char *p;
+ int has_symlinks = 0;
+ while (strlen (tmp_pathspec) && !has_symlinks)
+ {
+ lt_debugprintf (__FILE__, __LINE__,
+ "checking path component for symlinks: %s\n",
+ tmp_pathspec);
+ if (lstat (tmp_pathspec, &s) == 0)
+ {
+ if (S_ISLNK (s.st_mode) != 0)
+ {
+ has_symlinks = 1;
+ break;
+ }
+
+ /* search backwards for last DIR_SEPARATOR */
+ p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+ while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+ p--;
+ if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+ {
+ /* no more DIR_SEPARATORS left */
+ break;
+ }
+ *p = '\0';
+ }
+ else
+ {
+ lt_fatal (__FILE__, __LINE__,
+ "error accessing file \"%s\": %s",
+ tmp_pathspec, nonnull (strerror (errno)));
+ }
+ }
+ XFREE (tmp_pathspec);
+
+ if (!has_symlinks)
+ {
+ return xstrdup (pathspec);
+ }
+
+ tmp_pathspec = realpath (pathspec, buf);
+ if (tmp_pathspec == 0)
+ {
+ lt_fatal (__FILE__, __LINE__,
+ "could not follow symlinks for %s", pathspec);
+ }
+ return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+ size_t len, patlen;
+
+ assert (str != NULL);
+ assert (pat != NULL);
+
+ len = strlen (str);
+ patlen = strlen (pat);
+
+ if (patlen <= len)
+ {
+ str += len - patlen;
+ if (STREQ (str, pat))
+ *str = '\0';
+ }
+ return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+ va_list args;
+ if (lt_debug)
+ {
+ (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+ va_start (args, fmt);
+ (void) vfprintf (stderr, fmt, args);
+ va_end (args);
+ }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+ int line, const char *mode,
+ const char *message, va_list ap)
+{
+ fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+ vfprintf (stderr, message, ap);
+ fprintf (stderr, ".\n");
+
+ if (exit_status >= 0)
+ exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+ va_list ap;
+ va_start (ap, message);
+ lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+ va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+ return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+ return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+ lt_debugprintf (__FILE__, __LINE__,
+ "(lt_setenv) setting '%s' to '%s'\n",
+ nonnull (name), nonnull (value));
+ {
+#ifdef HAVE_SETENV
+ /* always make a copy, for consistency with !HAVE_SETENV */
+ char *str = xstrdup (value);
+ setenv (name, str, 1);
+#else
+ size_t len = strlen (name) + 1 + strlen (value) + 1;
+ char *str = XMALLOC (char, len);
+ sprintf (str, "%s=%s", name, value);
+ if (putenv (str) != EXIT_SUCCESS)
+ {
+ XFREE (str);
+ }
+#endif
+ }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+ char *new_value;
+ if (orig_value && *orig_value)
+ {
+ size_t orig_value_len = strlen (orig_value);
+ size_t add_len = strlen (add);
+ new_value = XMALLOC (char, add_len + orig_value_len + 1);
+ if (to_end)
+ {
+ strcpy (new_value, orig_value);
+ strcpy (new_value + orig_value_len, add);
+ }
+ else
+ {
+ strcpy (new_value, add);
+ strcpy (new_value + add_len, orig_value);
+ }
+ }
+ else
+ {
+ new_value = xstrdup (add);
+ }
+ return new_value;
+}
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+ lt_debugprintf (__FILE__, __LINE__,
+ "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+ nonnull (name), nonnull (value));
+
+ if (name && *name && value && *value)
+ {
+ char *new_value = lt_extend_str (getenv (name), value, 0);
+ /* some systems can't cope with a ':'-terminated path #' */
+ size_t len = strlen (new_value);
+ while ((len > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+ {
+ new_value[--len] = '\0';
+ }
+ lt_setenv (name, new_value);
+ XFREE (new_value);
+ }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+ lt_debugprintf (__FILE__, __LINE__,
+ "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+ nonnull (name), nonnull (value));
+
+ if (name && *name && value && *value)
+ {
+ char *new_value = lt_extend_str (getenv (name), value, 0);
+ lt_setenv (name, new_value);
+ XFREE (new_value);
+ }
+}
+
+EOF
+ case $host_os in
+ mingw* | windows*)
+ cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+ Note that spawn() does not by itself call the command interpreter
+ (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+ ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+ GetVersionEx(&v);
+ v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+ }) ? "cmd.exe" : "command.com").
+ Instead it simply concatenates the arguments, separated by ' ', and calls
+ CreateProcess(). We must quote the arguments since Win32 CreateProcess()
+ interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+ special way:
+ - Space and tab are interpreted as delimiters. They are not treated as
+ delimiters if they are surrounded by double quotes: "...".
+ - Unescaped double quotes are removed from the input. Their only effect is
+ that within double quotes, space and tab are treated like normal
+ characters.
+ - Backslashes not followed by double quotes are not special.
+ - But 2*n+1 backslashes followed by a double quote become
+ n backslashes followed by a double quote (n >= 0):
+ \" -> "
+ \\\" -> \"
+ \\\\\" -> \\"
+ */
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+ size_t argc;
+ char **new_argv;
+ size_t i;
+
+ /* Count number of arguments. */
+ for (argc = 0; argv[argc] != NULL; argc++)
+ ;
+
+ /* Allocate new argument vector. */
+ new_argv = XMALLOC (char *, argc + 1);
+
+ /* Put quoted arguments into the new argument vector. */
+ for (i = 0; i < argc; i++)
+ {
+ const char *string = argv[i];
+
+ if (string[0] == '\0')
+ new_argv[i] = xstrdup ("\"\"");
+ else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+ {
+ int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+ size_t length;
+ unsigned int backslashes;
+ const char *s;
+ char *quoted_string;
+ char *p;
+
+ length = 0;
+ backslashes = 0;
+ if (quote_around)
+ length++;
+ for (s = string; *s != '\0'; s++)
+ {
+ char c = *s;
+ if (c == '"')
+ length += backslashes + 1;
+ length++;
+ if (c == '\\')
+ backslashes++;
+ else
+ backslashes = 0;
+ }
+ if (quote_around)
+ length += backslashes + 1;
+
+ quoted_string = XMALLOC (char, length + 1);
+
+ p = quoted_string;
+ backslashes = 0;
+ if (quote_around)
+ *p++ = '"';
+ for (s = string; *s != '\0'; s++)
+ {
+ char c = *s;
+ if (c == '"')
+ {
+ unsigned int j;
+ for (j = backslashes + 1; j > 0; j--)
+ *p++ = '\\';
+ }
+ *p++ = c;
+ if (c == '\\')
+ backslashes++;
+ else
+ backslashes = 0;
+ }
+ if (quote_around)
+ {
+ unsigned int j;
+ for (j = backslashes; j > 0; j--)
+ *p++ = '\\';
+ *p++ = '"';
+ }
+ *p = '\0';
+
+ new_argv[i] = quoted_string;
+ }
+ else
+ new_argv[i] = (char *) string;
+ }
+ new_argv[argc] = NULL;
+
+ return new_argv;
+}
+EOF
+ ;;
+ esac
+
+ cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
+ func_emit_wrapper yes |
+ $SED -n -e '
+s/^\(.\{79\}\)\(..*\)/\1\
+\2/
+h
+s/\([\\"]\)/\\\1/g
+s/$/\\n/
+s/\([^\n]*\).*/ fputs ("\1", f);/p
+g
+D'
+ cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+ $debug_cmd
+
+ case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+ *import*) : ;;
+ *) false ;;
+ esac
+}
+
+# func_suncc_cstd_abi
+# !!ONLY CALL THIS FOR SUN CC AFTER $compile_command IS FULLY EXPANDED!!
+# Several compiler flags select an ABI that is incompatible with the
+# Cstd library. Avoid specifying it if any are in CXXFLAGS.
+func_suncc_cstd_abi ()
+{
+ $debug_cmd
+
+ case " $compile_command " in
+ *" -compat=g "*|*\ -std=c++[0-9][0-9]\ *|*" -library=stdcxx4 "*|*" -library=stlport4 "*)
+ suncc_use_cstd_abi=no
+ ;;
+ *)
+ suncc_use_cstd_abi=yes
+ ;;
+ esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+ $debug_cmd
+
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-windows* | *-*-pw32* | *-*-os2* | *-cegcc*)
+ # It is impossible to link a dll without this setting, and
+ # we shouldn't force the makefile maintainer to figure out
+ # what system we are compiling for in order to pass an extra
+ # flag for every libtool invocation.
+ # allow_undefined=no
+
+ # FIXME: Unfortunately, there are problems with the above when trying
+ # to make a dll that has undefined symbols, in which case not
+ # even a static library is built. For now, we need to specify
+ # -no-undefined on the libtool link line when we can be certain
+ # that all symbols are satisfied, otherwise we get a static library.
+ allow_undefined=yes
+ ;;
+ *)
+ allow_undefined=yes
+ ;;
+ esac
+ libtool_args=$nonopt
+ base_compile="$nonopt $@"
+ compile_command=$nonopt
+ finalize_command=$nonopt
+
+ compile_rpath=
+ finalize_rpath=
+ compile_shlibpath=
+ finalize_shlibpath=
+ convenience=
+ old_convenience=
+ deplibs=
+ old_deplibs=
+ compiler_flags=
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
+ inst_prefix_dir=
+ new_inherited_linker_flags=
+
+ avoid_version=no
+ bindir=
+ dlfiles=
+ dlprefiles=
+ dlself=no
+ export_dynamic=no
+ export_symbols=
+ export_symbols_regex=
+ generated=
+ libobjs=
+ ltlibs=
+ module=no
+ no_install=no
+ objs=
+ os2dllname=
+ non_pic_objects=
+ precious_files_regex=
+ prefer_static_libs=no
+ preload=false
+ prev=
+ prevarg=
+ release=
+ rpath=
+ xrpath=
+ perm_rpath=
+ temp_rpath=
+ thread_safe=no
+ vinfo=
+ vinfo_number=no
+ weak_libs=
+ single_module=$wl-single_module
+ func_infer_tag $base_compile
+
+ # We need to know -static, to get the right output filenames.
+ for arg
+ do
+ case $arg in
+ -shared)
+ test yes != "$build_libtool_libs" \
+ && func_fatal_configuration "cannot build a shared library"
+ build_old_libs=no
+ break
+ ;;
+ -all-static | -static | -static-libtool-libs)
+ case $arg in
+ -all-static)
+ if test yes = "$build_libtool_libs" && test -z "$link_static_flag"; then
+ func_warning "complete static linking is impossible in this configuration"
+ fi
+ if test -n "$link_static_flag"; then
+ dlopen_self=$dlopen_self_static
+ fi
+ prefer_static_libs=yes
+ ;;
+ -static)
+ if test -z "$pic_flag" && test -n "$link_static_flag"; then
+ dlopen_self=$dlopen_self_static
+ fi
+ prefer_static_libs=built
+ ;;
+ -static-libtool-libs)
+ if test -z "$pic_flag" && test -n "$link_static_flag"; then
+ dlopen_self=$dlopen_self_static
+ fi
+ prefer_static_libs=yes
+ ;;
+ esac
+ build_libtool_libs=no
+ build_old_libs=yes
+ break
+ ;;
+ esac
+ done
+
+ # See if our shared archives depend on static archives.
+ test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+ # Go through the arguments, transforming them on the way.
+ while test "$#" -gt 0; do
+ arg=$1
+ shift
+ func_quote_arg pretty,unquoted "$arg"
+ qarg=$func_quote_arg_unquoted_result
+ func_append libtool_args " $func_quote_arg_result"
+
+ # If the previous option needs an argument, assign it.
+ if test -n "$prev"; then
+ case $prev in
+ output)
+ func_append compile_command " @OUTPUT@"
+ func_append finalize_command " @OUTPUT@"
+ ;;
+ esac
+
+ case $prev in
+ bindir)
+ bindir=$arg
+ prev=
+ continue
+ ;;
+ dlfiles|dlprefiles)
+ $preload || {
+ # Add the symbol object into the linking commands.
+ func_append compile_command " @SYMFILE@"
+ func_append finalize_command " @SYMFILE@"
+ preload=:
+ }
+ case $arg in
+ *.la | *.lo) ;; # We handle these cases below.
+ force)
+ if test no = "$dlself"; then
+ dlself=needless
+ export_dynamic=yes
+ fi
+ prev=
+ continue
+ ;;
+ self)
+ if test dlprefiles = "$prev"; then
+ dlself=yes
+ elif test dlfiles = "$prev" && test yes != "$dlopen_self"; then
+ dlself=yes
+ else
+ dlself=needless
+ export_dynamic=yes
+ fi
+ prev=
+ continue
+ ;;
+ *)
+ if test dlfiles = "$prev"; then
+ func_append dlfiles " $arg"
+ else
+ func_append dlprefiles " $arg"
+ fi
+ prev=
+ continue
+ ;;
+ esac
+ ;;
+ expsyms)
+ export_symbols=$arg
+ test -f "$arg" \
+ || func_fatal_error "symbol file '$arg' does not exist"
+ prev=
+ continue
+ ;;
+ expsyms_regex)
+ export_symbols_regex=$arg
+ prev=
+ continue
+ ;;
+ framework)
+ case $host in
+ *-*-darwin*)
+ case "$deplibs " in
+ *" $qarg.ltframework "*) ;;
+ *) func_append deplibs " $qarg.ltframework" # this is fixed later
+ ;;
+ esac
+ ;;
+ esac
+ prev=
+ continue
+ ;;
+ inst_prefix)
+ inst_prefix_dir=$arg
+ prev=
+ continue
+ ;;
+ mllvm)
+ # Clang does not use LLVM to link, so we can simply discard any
+ # '-mllvm $arg' options when doing the link step.
+ prev=
+ continue
+ ;;
+ objectlist)
+ if test -f "$arg"; then
+ save_arg=$arg
+ moreargs=
+ for fil in `cat "$save_arg"`
+ do
+# func_append moreargs " $fil"
+ arg=$fil
+ # A libtool-controlled object.
+
+ # Check to see that this really is a libtool object.
+ if func_lalib_unsafe_p "$arg"; then
+ pic_object=
+ non_pic_object=
+
+ # Read the .lo file
+ func_source "$arg"
+
+ if test -z "$pic_object" ||
+ test -z "$non_pic_object" ||
+ test none = "$pic_object" &&
+ test none = "$non_pic_object"; then
+ func_fatal_error "cannot find name of object for '$arg'"
+ fi
+
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir=$func_dirname_result
+
+ if test none != "$pic_object"; then
+ # Prepend the subdirectory the object is found in.
+ pic_object=$xdir$pic_object
+
+ if test dlfiles = "$prev"; then
+ if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then
+ func_append dlfiles " $pic_object"
+ prev=
+ continue
+ else
+ # If libtool objects are unsupported, then we need to preload.
+ prev=dlprefiles
+ fi
+ fi
+
+ # CHECK ME: I think I busted this. -Ossama
+ if test dlprefiles = "$prev"; then
+ # Preload the old-style object.
+ func_append dlprefiles " $pic_object"
+ prev=
+ fi
+
+ # A PIC object.
+ func_append libobjs " $pic_object"
+ arg=$pic_object
+ fi
+
+ # Non-PIC object.
+ if test none != "$non_pic_object"; then
+ # Prepend the subdirectory the object is found in.
+ non_pic_object=$xdir$non_pic_object
+
+ # A standard non-PIC object
+ func_append non_pic_objects " $non_pic_object"
+ if test -z "$pic_object" || test none = "$pic_object"; then
+ arg=$non_pic_object
+ fi
+ else
+ # If the PIC object exists, use it instead.
+ # $xdir was prepended to $pic_object above.
+ non_pic_object=$pic_object
+ func_append non_pic_objects " $non_pic_object"
+ fi
+ else
+ # Only an error if not doing a dry-run.
+ if $opt_dry_run; then
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir=$func_dirname_result
+
+ func_lo2o "$arg"
+ pic_object=$xdir$objdir/$func_lo2o_result
+ non_pic_object=$xdir$func_lo2o_result
+ func_append libobjs " $pic_object"
+ func_append non_pic_objects " $non_pic_object"
+ else
+ func_fatal_error "'$arg' is not a valid libtool object"
+ fi
+ fi
+ done
+ else
+ func_fatal_error "link input file '$arg' does not exist"
+ fi
+ arg=$save_arg
+ prev=
+ continue
+ ;;
+ os2dllname)
+ os2dllname=$arg
+ prev=
+ continue
+ ;;
+ precious_regex)
+ precious_files_regex=$arg
+ prev=
+ continue
+ ;;
+ release)
+ release=-$arg
+ prev=
+ continue
+ ;;
+ rpath | xrpath)
+ # We need an absolute path.
+ case $arg in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ *)
+ func_fatal_error "only absolute run-paths are allowed"
+ ;;
+ esac
+ if test rpath = "$prev"; then
+ case "$rpath " in
+ *" $arg "*) ;;
+ *) func_append rpath " $arg" ;;
+ esac
+ else
+ case "$xrpath " in
+ *" $arg "*) ;;
+ *) func_append xrpath " $arg" ;;
+ esac
+ fi
+ prev=
+ continue
+ ;;
+ shrext)
+ shrext_cmds=$arg
+ prev=
+ continue
+ ;;
+ weak)
+ func_append weak_libs " $arg"
+ prev=
+ continue
+ ;;
+ xassembler)
+ func_append compiler_flags " -Xassembler $qarg"
+ prev=
+ func_append compile_command " -Xassembler $qarg"
+ func_append finalize_command " -Xassembler $qarg"
+ continue
+ ;;
+ xcclinker)
+ func_append linker_flags " $qarg"
+ func_append compiler_flags " $qarg"
+ prev=
+ func_append compile_command " $qarg"
+ func_append finalize_command " $qarg"
+ continue
+ ;;
+ xcompiler)
+ func_append compiler_flags " $qarg"
+ prev=
+ func_append compile_command " $qarg"
+ func_append finalize_command " $qarg"
+ continue
+ ;;
+ xlinker)
+ func_append linker_flags " $qarg"
+ func_append compiler_flags " $wl$qarg"
+ prev=
+ func_append compile_command " $wl$qarg"
+ func_append finalize_command " $wl$qarg"
+ continue
+ ;;
+ *)
+ eval "$prev=\"\$arg\""
+ prev=
+ continue
+ ;;
+ esac
+ fi # test -n "$prev"
+
+ prevarg=$arg
+
+ case $arg in
+ -all-static)
+ if test -n "$link_static_flag"; then
+ # See comment for -static flag below, for more details.
+ func_append compile_command " $link_static_flag"
+ func_append finalize_command " $link_static_flag"
+ fi
+ continue
+ ;;
+
+ -allow-undefined)
+ # FIXME: remove this flag sometime in the future.
+ func_fatal_error "'-allow-undefined' must not be used because it is the default"
+ ;;
+
+ -avoid-version)
+ avoid_version=yes
+ continue
+ ;;
+
+ -bindir)
+ prev=bindir
+ continue
+ ;;
+
+ -dlopen)
+ prev=dlfiles
+ continue
+ ;;
+
+ -dlpreopen)
+ prev=dlprefiles
+ continue
+ ;;
+
+ -export-dynamic)
+ export_dynamic=yes
+ continue
+ ;;
+
+ -export-symbols | -export-symbols-regex)
+ if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+ func_fatal_error "more than one -exported-symbols argument is not allowed"
+ fi
+ if test X-export-symbols = "X$arg"; then
+ prev=expsyms
+ else
+ prev=expsyms_regex
+ fi
+ continue
+ ;;
+
+ -framework)
+ prev=framework
+ continue
+ ;;
+
+ -inst-prefix-dir)
+ prev=inst_prefix
+ continue
+ ;;
+
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+ case $with_gcc/$host in
+ no/*-*-irix* | /*-*-irix*)
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ ;;
+ esac
+ continue
+ ;;
+
+ -L*)
+ func_stripname "-L" '' "$arg"
+ if test -z "$func_stripname_result"; then
+ if test "$#" -gt 0; then
+ func_fatal_error "require no space between '-L' and '$1'"
+ else
+ func_fatal_error "need path for '-L' option"
+ fi
+ fi
+ func_resolve_sysroot "$func_stripname_result"
+ dir=$func_resolve_sysroot_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ *)
+ absdir=`cd "$dir" && pwd`
+ test -z "$absdir" && \
+ func_fatal_error "cannot determine absolute directory name of '$dir'"
+ dir=$absdir
+ ;;
+ esac
+ case "$deplibs " in
+ *" -L$dir "* | *" $arg "*)
+ # Will only happen for absolute or sysroot arguments
+ ;;
+ *)
+ # Preserve sysroot, but never include relative directories
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
+ *) func_append deplibs " -L$dir" ;;
+ esac
+ func_append lib_search_path " $dir"
+ ;;
+ esac
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-windows* | *-*-pw32* | *-*-os2* | *-cegcc*)
+ testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+ case :$dllsearchpath: in
+ *":$dir:"*) ;;
+ ::) dllsearchpath=$dir;;
+ *) func_append dllsearchpath ":$dir";;
+ esac
+ case :$dllsearchpath: in
+ *":$testbindir:"*) ;;
+ ::) dllsearchpath=$testbindir;;
+ *) func_append dllsearchpath ":$testbindir";;
+ esac
+ ;;
+ esac
+ continue
+ ;;
+
+ -l*)
+ if test X-lc = "X$arg" || test X-lm = "X$arg"; then
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-windows* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+ # These systems don't actually have a C or math library (as such)
+ continue
+ ;;
+ *-*-os2*)
+ # These systems don't actually have a C library (as such)
+ test X-lc = "X$arg" && continue
+ ;;
+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-midnightbsd*)
+ # Do not include libc due to us having libc/libc_r.
+ test X-lc = "X$arg" && continue
+ ;;
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # Rhapsody C and math libraries are in the System framework
+ func_append deplibs " System.ltframework"
+ continue
+ ;;
+ *-*-sco3.2v5* | *-*-sco5v6*)
+ # Causes problems with __ctype
+ test X-lc = "X$arg" && continue
+ ;;
+ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+ # Compiler inserts libc in the correct place for threads to work
+ test X-lc = "X$arg" && continue
+ ;;
+ esac
+ elif test X-lc_r = "X$arg"; then
+ case $host in
+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-midnightbsd*)
+ # Do not include libc_r directly, use -pthread flag.
+ continue
+ ;;
+ esac
+ fi
+ func_append deplibs " $arg"
+ continue
+ ;;
+
+ -mllvm)
+ prev=mllvm
+ continue
+ ;;
+
+ -module)
+ module=yes
+ continue
+ ;;
+
+ # Tru64 UNIX uses -model [arg] to determine the layout of C++
+ # classes, name mangling, and exception handling.
+ # Darwin uses the -arch flag to determine output architecture.
+ -model|-arch|-isysroot|--sysroot)
+ func_append compiler_flags " $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ prev=xcompiler
+ continue
+ ;;
+ # Solaris ld rejects as of 11.4. Refer to Oracle bug 22985199.
+ -pthread)
+ case $host in
+ *solaris2*) ;;
+ *)
+ case "$new_inherited_linker_flags " in
+ *" $arg "*) ;;
+ * ) func_append new_inherited_linker_flags " $arg" ;;
+ esac
+ ;;
+ esac
+ continue
+ ;;
+ -mt|-mthreads|-kthread|-Kthread|-pthreads|--thread-safe \
+ |-threads|-fopenmp|-fopenmp=*|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+ func_append compiler_flags " $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ case "$new_inherited_linker_flags " in
+ *" $arg "*) ;;
+ * ) func_append new_inherited_linker_flags " $arg" ;;
+ esac
+ continue
+ ;;
+
+ -multi_module)
+ single_module=$wl-multi_module
+ continue
+ ;;
+
+ -no-fast-install)
+ fast_install=no
+ continue
+ ;;
+
+ -no-install)
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-windows* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+ # The PATH hackery in wrapper scripts is required on Windows
+ # and Darwin in order for the loader to find any dlls it needs.
+ func_warning "'-no-install' is ignored for $host"
+ func_warning "assuming '-no-fast-install' instead"
+ fast_install=no
+ ;;
+ *) no_install=yes ;;
+ esac
+ continue
+ ;;
+
+ -no-undefined)
+ allow_undefined=no
+ continue
+ ;;
+
+ -objectlist)
+ prev=objectlist
+ continue
+ ;;
+
+ -os2dllname)
+ prev=os2dllname
+ continue
+ ;;
+
+ -o) prev=output ;;
+
+ -precious-files-regex)
+ prev=precious_regex
+ continue
+ ;;
+
+ -release)
+ prev=release
+ continue
+ ;;
+
+ -rpath)
+ prev=rpath
+ continue
+ ;;
+
+ -R)
+ prev=xrpath
+ continue
+ ;;
+
+ -R*)
+ func_stripname '-R' '' "$arg"
+ dir=$func_stripname_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ =*)
+ func_stripname '=' '' "$dir"
+ dir=$lt_sysroot$func_stripname_result
+ ;;
+ *)
+ func_fatal_error "only absolute run-paths are allowed"
+ ;;
+ esac
+ case "$xrpath " in
+ *" $dir "*) ;;
+ *) func_append xrpath " $dir" ;;
+ esac
+ continue
+ ;;
+
+ -shared)
+ # The effects of -shared are defined in a previous loop.
+ continue
+ ;;
+
+ -shrext)
+ prev=shrext
+ continue
+ ;;
+
+ -static | -static-libtool-libs)
+ # The effects of -static are defined in a previous loop.
+ # We used to do the same as -all-static on platforms that
+ # didn't have a PIC flag, but the assumption that the effects
+ # would be equivalent was wrong. It would break on at least
+ # Digital Unix and AIX.
+ continue
+ ;;
+
+ -thread-safe)
+ thread_safe=yes
+ continue
+ ;;
+
+ -version-info)
+ prev=vinfo
+ continue
+ ;;
+
+ -version-number)
+ prev=vinfo
+ vinfo_number=yes
+ continue
+ ;;
+
+ -weak)
+ prev=weak
+ continue
+ ;;
+
+ -Wc,*)
+ func_stripname '-Wc,' '' "$arg"
+ args=$func_stripname_result
+ arg=
+ save_ifs=$IFS; IFS=,
+ for flag in $args; do
+ IFS=$save_ifs
+ func_quote_arg pretty "$flag"
+ func_append arg " $func_quote_arg_result"
+ func_append compiler_flags " $func_quote_arg_result"
+ done
+ IFS=$save_ifs
+ func_stripname ' ' '' "$arg"
+ arg=$func_stripname_result
+ ;;
+
+ -Wl,*)
+ func_stripname '-Wl,' '' "$arg"
+ args=$func_stripname_result
+ arg=
+ save_ifs=$IFS; IFS=,
+ for flag in $args; do
+ IFS=$save_ifs
+ func_quote_arg pretty "$flag"
+ func_append arg " $wl$func_quote_arg_result"
+ func_append compiler_flags " $wl$func_quote_arg_result"
+ func_append linker_flags " $func_quote_arg_result"
+ done
+ IFS=$save_ifs
+ func_stripname ' ' '' "$arg"
+ arg=$func_stripname_result
+ ;;
+
+ -Xassembler)
+ prev=xassembler
+ continue
+ ;;
+
+ -Xcompiler)
+ prev=xcompiler
+ continue
+ ;;
+
+ -Xlinker)
+ prev=xlinker
+ continue
+ ;;
+
+ -XCClinker)
+ prev=xcclinker
+ continue
+ ;;
+
+ # -msg_* for osf cc
+ -msg_*)
+ func_quote_arg pretty "$arg"
+ arg=$func_quote_arg_result
+ ;;
+
+ # Flags to be passed through unchanged, with rationale:
+ # -64, -mips[0-9] enable 64-bit mode for the SGI compiler
+ # -r[0-9][0-9]* specify processor for the SGI compiler
+ # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+ # +DA*, +DD* enable 64-bit mode for the HP compiler
+ # -q* compiler args for the IBM compiler
+ # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+ # -F/path path to uninstalled frameworks, gcc on darwin
+ # -p, -pg, --coverage, -fprofile-* profiling flags for GCC
+ # -fstack-protector* stack protector flags for GCC
+ # @file GCC response files
+ # -tp=* Portland pgcc target processor selection
+ # --sysroot=* for sysroot support
+ # -O*, -g*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+ # -specs=* GCC specs files
+ # -stdlib=* select c++ std lib with clang
+ # -fdiagnostics-color* simply affects output
+ # -frecord-gcc-switches used to verify flags were respected
+ # -fsanitize=* Clang/GCC memory and address sanitizer
+ # -fno-sanitize* Clang/GCC memory and address sanitizer
+ # -shared-libsan Link with shared sanitizer runtimes (Clang)
+ # -static-libsan Link with static sanitizer runtimes (Clang)
+ # -no-canonical-prefixes Do not expand any symbolic links
+ # -fuse-ld=* Linker select flags for GCC
+ # -rtlib=* select c runtime lib with clang
+ # --unwindlib=* select unwinder library with clang
+ # -f{file|debug|macro|profile}-prefix-map=* needed for lto linking
+ # -Wa,* Pass flags directly to the assembler
+ # -Werror, -Werror=* Report (specified) warnings as errors
+ -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+ -O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-no-canonical-prefixes| \
+ -stdlib=*|-rtlib=*|--unwindlib=*| \
+ -specs=*|-fsanitize=*|-fno-sanitize*|-shared-libsan|-static-libsan| \
+ -ffile-prefix-map=*|-fdebug-prefix-map=*|-fmacro-prefix-map=*|-fprofile-prefix-map=*| \
+ -fdiagnostics-color*|-frecord-gcc-switches| \
+ -fuse-ld=*|-Wa,*|-Werror|-Werror=*)
+ func_quote_arg pretty "$arg"
+ arg=$func_quote_arg_result
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ func_append compiler_flags " $arg"
+ continue
+ ;;
+
+ -Z*)
+ if test os2 = "`expr $host : '.*\(os2\)'`"; then
+ # OS/2 uses -Zxxx to specify OS/2-specific options
+ compiler_flags="$compiler_flags $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ case $arg in
+ -Zlinker | -Zstack)
+ prev=xcompiler
+ ;;
+ esac
+ continue
+ else
+ # Otherwise treat like 'Some other compiler flag' below
+ func_quote_arg pretty "$arg"
+ arg=$func_quote_arg_result
+ fi
+ ;;
+
+ # Some other compiler flag.
+ -* | +*)
+ func_quote_arg pretty "$arg"
+ arg=$func_quote_arg_result
+ ;;
+
+ *.$objext)
+ # A standard object.
+ func_append objs " $arg"
+ ;;
+
+ *.lo)
+ # A libtool-controlled object.
+
+ # Check to see that this really is a libtool object.
+ if func_lalib_unsafe_p "$arg"; then
+ pic_object=
+ non_pic_object=
+
+ # Read the .lo file
+ func_source "$arg"
+
+ if test -z "$pic_object" ||
+ test -z "$non_pic_object" ||
+ test none = "$pic_object" &&
+ test none = "$non_pic_object"; then
+ func_fatal_error "cannot find name of object for '$arg'"
+ fi
+
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir=$func_dirname_result
+
+ test none = "$pic_object" || {
+ # Prepend the subdirectory the object is found in.
+ pic_object=$xdir$pic_object
+
+ if test dlfiles = "$prev"; then
+ if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then
+ func_append dlfiles " $pic_object"
+ prev=
+ continue
+ else
+ # If libtool objects are unsupported, then we need to preload.
+ prev=dlprefiles
+ fi
+ fi
+
+ # CHECK ME: I think I busted this. -Ossama
+ if test dlprefiles = "$prev"; then
+ # Preload the old-style object.
+ func_append dlprefiles " $pic_object"
+ prev=
+ fi
+
+ # A PIC object.
+ func_append libobjs " $pic_object"
+ arg=$pic_object
+ }
+
+ # Non-PIC object.
+ if test none != "$non_pic_object"; then
+ # Prepend the subdirectory the object is found in.
+ non_pic_object=$xdir$non_pic_object
+
+ # A standard non-PIC object
+ func_append non_pic_objects " $non_pic_object"
+ if test -z "$pic_object" || test none = "$pic_object"; then
+ arg=$non_pic_object
+ fi
+ else
+ # If the PIC object exists, use it instead.
+ # $xdir was prepended to $pic_object above.
+ non_pic_object=$pic_object
+ func_append non_pic_objects " $non_pic_object"
+ fi
+ else
+ # Only an error if not doing a dry-run.
+ if $opt_dry_run; then
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir=$func_dirname_result
+
+ func_lo2o "$arg"
+ pic_object=$xdir$objdir/$func_lo2o_result
+ non_pic_object=$xdir$func_lo2o_result
+ func_append libobjs " $pic_object"
+ func_append non_pic_objects " $non_pic_object"
+ else
+ func_fatal_error "'$arg' is not a valid libtool object"
+ fi
+ fi
+ ;;
+
+ *.$libext)
+ # An archive.
+ func_append deplibs " $arg"
+ func_append old_deplibs " $arg"
+ continue
+ ;;
+
+ *.la)
+ # A libtool-controlled library.
+
+ func_resolve_sysroot "$arg"
+ if test dlfiles = "$prev"; then
+ # This library was specified with -dlopen.
+ func_append dlfiles " $func_resolve_sysroot_result"
+ prev=
+ elif test dlprefiles = "$prev"; then
+ # The library was specified with -dlpreopen.
+ func_append dlprefiles " $func_resolve_sysroot_result"
+ prev=
+ else
+ func_append deplibs " $func_resolve_sysroot_result"
+ fi
+ continue
+ ;;
+
+ # Some other compiler argument.
+ *)
+ # Unknown arguments in both finalize_command and compile_command need
+ # to be aesthetically quoted because they are evaled later.
+ func_quote_arg pretty "$arg"
+ arg=$func_quote_arg_result
+ ;;
+ esac # arg
+
+ # Now actually substitute the argument into the commands.
+ if test -n "$arg"; then
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ fi
+ done # argument parsing loop
+
+ test -n "$prev" && \
+ func_fatal_help "the '$prevarg' option requires an argument"
+
+ if test yes = "$export_dynamic" && test -n "$export_dynamic_flag_spec"; then
+ eval arg=\"$export_dynamic_flag_spec\"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ fi
+
+ oldlibs=
+ # calculate the name of the file, without its directory
+ func_basename "$output"
+ outputname=$func_basename_result
+ libobjs_save=$libobjs
+
+ if test -n "$shlibpath_var"; then
+ # get the directories listed in $shlibpath_var
+ eval shlib_search_path=\`\$ECHO \"\$$shlibpath_var\" \| \$SED \'s/:/ /g\'\`
+ else
+ shlib_search_path=
+ fi
+ eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+ eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+ # Definition is injected by LT_CONFIG during libtool generation.
+ func_munge_path_list sys_lib_dlsearch_path "$LT_SYS_LIBRARY_PATH"
+
+ func_dirname "$output" "/" ""
+ output_objdir=$func_dirname_result$objdir
+ func_to_tool_file "$output_objdir/"
+ tool_output_objdir=$func_to_tool_file_result
+ # Create the object directory.
+ func_mkdir_p "$output_objdir"
+
+ # Determine the type of output
+ case $output in
+ "")
+ func_fatal_help "you must specify an output file"
+ ;;
+ *.$libext) linkmode=oldlib ;;
+ *.lo | *.$objext) linkmode=obj ;;
+ *.la) linkmode=lib ;;
+ *) linkmode=prog ;; # Anything else should be a program.
+ esac
+
+ specialdeplibs=
+
+ libs=
+ # Find all interdependent deplibs by searching for libraries
+ # that are linked more than once (e.g. -la -lb -la)
+ for deplib in $deplibs; do
+ if $opt_preserve_dup_deps; then
+ case "$libs " in
+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+ func_append libs " $deplib"
+ done
+
+ if test lib = "$linkmode"; then
+ libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+ # Compute libraries that are listed more than once in $predeps
+ # $postdeps and mark them as special (i.e., whose duplicates are
+ # not to be eliminated).
+ pre_post_deps=
+ if $opt_duplicate_compiler_generated_deps; then
+ for pre_post_dep in $predeps $postdeps; do
+ case "$pre_post_deps " in
+ *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_dep" ;;
+ esac
+ func_append pre_post_deps " $pre_post_dep"
+ done
+ fi
+ pre_post_deps=
+ fi
+
+ deplibs=
+ newdependency_libs=
+ newlib_search_path=
+ need_relink=no # whether we're linking any uninstalled libtool libraries
+ notinst_deplibs= # not-installed libtool libraries
+ notinst_path= # paths that contain not-installed libtool libraries
+
+ case $linkmode in
+ lib)
+ passes="conv dlpreopen link"
+ for file in $dlfiles $dlprefiles; do
+ case $file in
+ *.la) ;;
+ *)
+ func_fatal_help "libraries can '-dlopen' only libtool libraries: $file"
+ ;;
+ esac
+ done
+ ;;
+ prog)
+ compile_deplibs=
+ finalize_deplibs=
+ alldeplibs=false
+ newdlfiles=
+ newdlprefiles=
+ passes="conv scan dlopen dlpreopen link"
+ ;;
+ *) passes="conv"
+ ;;
+ esac
+
+ for pass in $passes; do
+ # The preopen pass in lib mode reverses $deplibs; put it back here
+ # so that, for instance, -L comes before the libs that need it...
+ if test lib,link = "$linkmode,$pass"; then
+ ## FIXME: Find the place where the list is rebuilt in the wrong
+ ## order, and fix it there properly
+ tmp_deplibs=
+ for deplib in $deplibs; do
+ tmp_deplibs="$deplib $tmp_deplibs"
+ done
+ deplibs=$tmp_deplibs
+ fi
+
+ if test lib,link = "$linkmode,$pass" ||
+ test prog,scan = "$linkmode,$pass"; then
+ libs=$deplibs
+ deplibs=
+ fi
+ if test prog = "$linkmode"; then
+ case $pass in
+ dlopen) libs=$dlfiles ;;
+ dlpreopen) libs=$dlprefiles ;;
+ link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+ esac
+ fi
+ if test lib,dlpreopen = "$linkmode,$pass"; then
+ # Collect and forward deplibs of preopened libtool libs
+ for lib in $dlprefiles; do
+ # Ignore non-libtool-libs
+ dependency_libs=
+ func_resolve_sysroot "$lib"
+ case $lib in
+ *.la) func_source "$func_resolve_sysroot_result" ;;
+ esac
+
+ # Collect preopened libtool deplibs, except any this library
+ # has declared as weak libs
+ for deplib in $dependency_libs; do
+ func_basename "$deplib"
+ deplib_base=$func_basename_result
+ case " $weak_libs " in
+ *" $deplib_base "*) ;;
+ *) func_append deplibs " $deplib" ;;
+ esac
+ done
+ done
+ libs=$dlprefiles
+ fi
+ if test dlopen = "$pass"; then
+ # Collect dlpreopened libraries
+ save_deplibs=$deplibs
+ deplibs=
+ fi
+
+ for deplib in $libs; do
+ lib=
+ found=false
+ case $deplib in
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+ |-threads|-fopenmp|-fopenmp=*|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+ if test prog,link = "$linkmode,$pass"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ func_append compiler_flags " $deplib"
+ if test lib = "$linkmode"; then
+ case "$new_inherited_linker_flags " in
+ *" $deplib "*) ;;
+ * ) func_append new_inherited_linker_flags " $deplib" ;;
+ esac
+ fi
+ fi
+ continue
+ ;;
+ -l*)
+ if test lib != "$linkmode" && test prog != "$linkmode"; then
+ func_warning "'-l' is ignored for archives/objects"
+ continue
+ fi
+ func_stripname '-l' '' "$deplib"
+ name=$func_stripname_result
+ if test lib = "$linkmode"; then
+ searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+ else
+ searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+ fi
+ for searchdir in $searchdirs; do
+ for search_ext in .la $std_shrext .so .a; do
+ # Search the libtool library
+ lib=$searchdir/lib$name$search_ext
+ if test -f "$lib"; then
+ if test .la = "$search_ext"; then
+ found=:
+ else
+ found=false
+ fi
+ break 2
+ fi
+ done
+ done
+ if $found; then
+ # deplib is a libtool library
+ # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
+ # we need to do some special things here, and not later.
+ if test yes = "$allow_libtool_libs_with_static_runtimes"; then
+ case " $predeps $postdeps " in
+ *" $deplib "*)
+ if func_lalib_p "$lib"; then
+ library_names=
+ old_library=
+ func_source "$lib"
+ for l in $old_library $library_names; do
+ ll=$l
+ done
+ if test "X$ll" = "X$old_library"; then # only static version available
+ found=false
+ func_dirname "$lib" "" "."
+ ladir=$func_dirname_result
+ lib=$ladir/$old_library
+ if test prog,link = "$linkmode,$pass"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+ test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs"
+ fi
+ continue
+ fi
+ fi
+ ;;
+ *) ;;
+ esac
+ fi
+ else
+ # deplib doesn't seem to be a libtool library
+ if test prog,link = "$linkmode,$pass"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+ test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs"
+ fi
+ continue
+ fi
+ ;; # -l
+ *.ltframework)
+ if test prog,link = "$linkmode,$pass"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+ if test lib = "$linkmode"; then
+ case "$new_inherited_linker_flags " in
+ *" $deplib "*) ;;
+ * ) func_append new_inherited_linker_flags " $deplib" ;;
+ esac
+ fi
+ fi
+ continue
+ ;;
+ -L*)
+ case $linkmode in
+ lib)
+ deplibs="$deplib $deplibs"
+ test conv = "$pass" && continue
+ newdependency_libs="$deplib $newdependency_libs"
+ func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ prog)
+ if test conv = "$pass"; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+ if test scan = "$pass"; then
+ deplibs="$deplib $deplibs"
+ else
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ fi
+ func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ *)
+ func_warning "'-L' is ignored for archives/objects"
+ ;;
+ esac # linkmode
+ continue
+ ;; # -L
+ -R*)
+ if test link = "$pass"; then
+ func_stripname '-R' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ dir=$func_resolve_sysroot_result
+ # Make sure the xrpath contains only unique directories.
+ case "$xrpath " in
+ *" $dir "*) ;;
+ *) func_append xrpath " $dir" ;;
+ esac
+ fi
+ deplibs="$deplib $deplibs"
+ continue
+ ;;
+ *.la)
+ func_resolve_sysroot "$deplib"
+ lib=$func_resolve_sysroot_result
+ ;;
+ *.$libext)
+ if test conv = "$pass"; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+ case $linkmode in
+ lib)
+ # Linking convenience modules into shared libraries is allowed,
+ # but linking other static libraries is non-portable.
+ case " $dlpreconveniencelibs " in
+ *" $deplib "*) ;;
+ *)
+ valid_a_lib=false
+ case $deplibs_check_method in
+ match_pattern*)
+ set dummy $deplibs_check_method; shift
+ match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+ if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+ | $EGREP "$match_pattern_regex" > /dev/null; then
+ valid_a_lib=:
+ fi
+ ;;
+ pass_all)
+ valid_a_lib=:
+ ;;
+ esac
+ if $valid_a_lib; then
+ func_warning "Linking the shared library $output against the static library $deplib is not portable!"
+ deplibs="$deplib $deplibs"
+ else
+ func_warning "Trying to link with static lib archive $deplib."
+ func_warning "I have the capability to make that library automatically link in when"
+ func_warning "you link to this library. But I can only do this if you have a"
+ func_warning "shared version of the library, which you do not appear to have"
+ func_warning "because the file extension .$libext of this argument makes me believe"
+ func_warning "that it is just a static archive that I should not use here."
+ fi
+ ;;
+ esac
+ continue
+ ;;
+ prog)
+ if test link != "$pass"; then
+ deplibs="$deplib $deplibs"
+ else
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ fi
+ continue
+ ;;
+ esac # linkmode
+ ;; # *.$libext
+ *.lo | *.$objext)
+ if test conv = "$pass"; then
+ deplibs="$deplib $deplibs"
+ elif test prog = "$linkmode"; then
+ if test dlpreopen = "$pass" || test yes != "$dlopen_support" || test no = "$build_libtool_libs"; then
+ # If there is no dlopen support or we're linking statically,
+ # we need to preload.
+ func_append newdlprefiles " $deplib"
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ func_append newdlfiles " $deplib"
+ fi
+ fi
+ continue
+ ;;
+ %DEPLIBS%)
+ alldeplibs=:
+ continue
+ ;;
+ esac # case $deplib
+
+ $found || test -f "$lib" \
+ || func_fatal_error "cannot find the library '$lib' or unhandled argument '$deplib'"
+
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$lib" \
+ || func_fatal_error "'$lib' is not a valid libtool archive"
+
+ func_dirname "$lib" "" "."
+ ladir=$func_dirname_result
+
+ dlname=
+ dlopen=
+ dlpreopen=
+ libdir=
+ library_names=
+ old_library=
+ inherited_linker_flags=
+ # If the library was installed with an old release of libtool,
+ # it will not redefine the variables 'installed' or 'shouldnotlink'
+ installed=yes
+ shouldnotlink=no
+ avoidtemprpath=
+
+
+ # Read the .la file
+ func_source "$lib"
+
+ # Convert "-framework foo" to "foo.ltframework"
+ if test -n "$inherited_linker_flags"; then
+ tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+ for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+ case " $new_inherited_linker_flags " in
+ *" $tmp_inherited_linker_flag "*) ;;
+ *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
+ esac
+ done
+ fi
+ dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ if test lib,link = "$linkmode,$pass" ||
+ test prog,scan = "$linkmode,$pass" ||
+ { test prog != "$linkmode" && test lib != "$linkmode"; }; then
+ test -n "$dlopen" && func_append dlfiles " $dlopen"
+ test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
+ fi
+
+ if test conv = "$pass"; then
+ # Only check for convenience libraries
+ deplibs="$lib $deplibs"
+ if test -z "$libdir"; then
+ if test -z "$old_library"; then
+ func_fatal_error "cannot find name of link library for '$lib'"
+ fi
+ # It is a libtool convenience library, so add in its objects.
+ func_append convenience " $ladir/$objdir/$old_library"
+ func_append old_convenience " $ladir/$objdir/$old_library"
+ elif test prog != "$linkmode" && test lib != "$linkmode"; then
+ func_fatal_error "'$lib' is not a convenience library"
+ fi
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ deplibs="$deplib $deplibs"
+ if $opt_preserve_dup_deps; then
+ case "$tmp_libs " in
+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+ func_append tmp_libs " $deplib"
+ done
+ continue
+ fi # $pass = conv
+
+
+ # Get the name of the library we link against.
+ linklib=
+ if test -n "$old_library" &&
+ { test yes = "$prefer_static_libs" ||
+ test built,no = "$prefer_static_libs,$installed"; }; then
+ linklib=$old_library
+ else
+ for l in $old_library $library_names; do
+ linklib=$l
+ done
+ fi
+ if test -z "$linklib"; then
+ func_fatal_error "cannot find name of link library for '$lib'"
+ fi
+
+ # This library was specified with -dlopen.
+ if test dlopen = "$pass"; then
+ test -z "$libdir" \
+ && func_fatal_error "cannot -dlopen a convenience library: '$lib'"
+ if test -z "$dlname" ||
+ test yes != "$dlopen_support" ||
+ test no = "$build_libtool_libs"
+ then
+ # If there is no dlname, no dlopen support or we're linking
+ # statically, we need to preload. We also need to preload any
+ # dependent libraries so libltdl's deplib preloader doesn't
+ # bomb out in the load deplibs phase.
+ func_append dlprefiles " $lib $dependency_libs"
+ else
+ func_append newdlfiles " $lib"
+ fi
+ continue
+ fi # $pass = dlopen
+
+ # We need an absolute path.
+ case $ladir in
+ [\\/]* | [A-Za-z]:[\\/]*) abs_ladir=$ladir ;;
+ *)
+ abs_ladir=`cd "$ladir" && pwd`
+ if test -z "$abs_ladir"; then
+ func_warning "cannot determine absolute directory name of '$ladir'"
+ func_warning "passing it literally to the linker, although it might fail"
+ abs_ladir=$ladir
+ fi
+ ;;
+ esac
+ func_basename "$lib"
+ laname=$func_basename_result
+
+ # Find the relevant object directory and library name.
+ if test yes = "$installed"; then
+ if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+ func_warning "library '$lib' was moved."
+ dir=$ladir
+ absdir=$abs_ladir
+ libdir=$abs_ladir
+ else
+ dir=$lt_sysroot$libdir
+ absdir=$lt_sysroot$libdir
+ fi
+ test yes = "$hardcode_automatic" && avoidtemprpath=yes
+ else
+ if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+ dir=$ladir
+ absdir=$abs_ladir
+ # Remove this search path later
+ func_append notinst_path " $abs_ladir"
+ else
+ dir=$ladir/$objdir
+ absdir=$abs_ladir/$objdir
+ # Remove this search path later
+ func_append notinst_path " $abs_ladir"
+ fi
+ fi # $installed = yes
+ func_stripname 'lib' '.la' "$laname"
+ name=$func_stripname_result
+
+ # This library was specified with -dlpreopen.
+ if test dlpreopen = "$pass"; then
+ if test -z "$libdir" && test prog = "$linkmode"; then
+ func_fatal_error "only libraries may -dlpreopen a convenience library: '$lib'"
+ fi
+ case $host in
+ # special handling for platforms with PE-DLLs.
+ *cygwin* | *mingw* | *windows* | *cegcc* )
+ # Linker will automatically link against shared library if both
+ # static and shared are present. Therefore, ensure we extract
+ # symbols from the import library if a shared library is present
+ # (otherwise, the dlopen module name will be incorrect). We do
+ # this by putting the import library name into $newdlprefiles.
+ # We recover the dlopen module name by 'saving' the la file
+ # name in a special purpose variable, and (later) extracting the
+ # dlname from the la file.
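+ # For instance, a hypothetical libfoo.la on MinGW with
+ # dlname='../bin/libfoo-1.dll' and linklib='libfoo.dll.a' has its
+ # import library added to $newdlprefiles for symbol extraction,
+ # while the .la path saved in libfile_* lets the real module name
+ # (libfoo-1.dll) be recovered later.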
+ if test -n "$dlname"; then
+ func_tr_sh "$dir/$linklib"
+ eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+ func_append newdlprefiles " $dir/$linklib"
+ else
+ func_append newdlprefiles " $dir/$old_library"
+ # Keep a list of preopened convenience libraries to check
+ # that they are being used correctly in the link pass.
+ test -z "$libdir" && \
+ func_append dlpreconveniencelibs " $dir/$old_library"
+ fi
+ ;;
+ * )
+ # Prefer using a static library (so that no silly _DYNAMIC symbols
+ # are required to link).
+ if test -n "$old_library"; then
+ func_append newdlprefiles " $dir/$old_library"
+ # Keep a list of preopened convenience libraries to check
+ # that they are being used correctly in the link pass.
+ test -z "$libdir" && \
+ func_append dlpreconveniencelibs " $dir/$old_library"
+ # Otherwise, use the dlname, so that lt_dlopen finds it.
+ elif test -n "$dlname"; then
+ func_append newdlprefiles " $dir/$dlname"
+ else
+ func_append newdlprefiles " $dir/$linklib"
+ fi
+ ;;
+ esac
+ fi # $pass = dlpreopen
+
+ if test -z "$libdir"; then
+ # Link the convenience library
+ if test lib = "$linkmode"; then
+ deplibs="$dir/$old_library $deplibs"
+ elif test prog,link = "$linkmode,$pass"; then
+ compile_deplibs="$dir/$old_library $compile_deplibs"
+ finalize_deplibs="$dir/$old_library $finalize_deplibs"
+ else
+ deplibs="$lib $deplibs" # used for prog,scan pass
+ fi
+ continue
+ fi
+
+
+ if test prog = "$linkmode" && test link != "$pass"; then
+ func_append newlib_search_path " $ladir"
+ deplibs="$lib $deplibs"
+
+ linkalldeplibs=false
+ if test no != "$link_all_deplibs" || test -z "$library_names" ||
+ test no = "$build_libtool_libs"; then
+ linkalldeplibs=:
+ fi
+
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ -L*) func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ esac
+ # Need to link against all dependency_libs?
+ if $linkalldeplibs; then
+ deplibs="$deplib $deplibs"
+ else
+ # Need to hardcode shared library paths
+ # and/or link against static libraries
+ newdependency_libs="$deplib $newdependency_libs"
+ fi
+ if $opt_preserve_dup_deps; then
+ case "$tmp_libs " in
+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+ func_append tmp_libs " $deplib"
+ done # for deplib
+ continue
+ fi # $linkmode = prog...
+
+ if test prog,link = "$linkmode,$pass"; then
+ if test -n "$library_names" &&
+ { { test no = "$prefer_static_libs" ||
+ test built,yes = "$prefer_static_libs,$installed"; } ||
+ test -z "$old_library"; }; then
+ # We need to hardcode the library path
+ if test -n "$shlibpath_var" && test -z "$avoidtemprpath"; then
+ # Make sure the rpath contains only unique directories.
+ case $temp_rpath: in
+ *"$absdir:"*) ;;
+ *) func_append temp_rpath "$absdir:" ;;
+ esac
+ fi
+
+ # Hardcode the library path.
+ # Skip directories that are in the system default run-time
+ # search path.
+ case " $sys_lib_dlsearch_path " in
+ *" $absdir "*) ;;
+ *)
+ case "$compile_rpath " in
+ *" $absdir "*) ;;
+ *) func_append compile_rpath " $absdir" ;;
+ esac
+ ;;
+ esac
+ case " $sys_lib_dlsearch_path " in
+ *" $libdir "*) ;;
+ *)
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ ;;
+ esac
+ fi # $linkmode,$pass = prog,link...
+
+ if $alldeplibs &&
+ { test pass_all = "$deplibs_check_method" ||
+ { test yes = "$build_libtool_libs" &&
+ test -n "$library_names"; }; }; then
+ # We only need to search for static libraries
+ continue
+ fi
+ fi
+
+ link_static=no # Whether the deplib will be linked statically
+ use_static_libs=$prefer_static_libs
+ if test built = "$use_static_libs" && test yes = "$installed"; then
+ use_static_libs=no
+ fi
+ if test -n "$library_names" &&
+ { test no = "$use_static_libs" || test -z "$old_library"; }; then
+ case $host_os in
+ cygwin* | mingw* | windows* | cegcc* | os2*)
+ # No point in relinking DLLs because paths are not encoded
+ func_append notinst_deplibs " $lib"
+ need_relink=no
+ ;;
+ *)
+ if test no = "$installed"; then
+ func_append notinst_deplibs " $lib"
+ need_relink=yes
+ fi
+ ;;
+ esac
+ # This is a shared library
+
+ # Warn about portability: can't link against -module libraries on some
+ # systems (darwin). Don't bleat about dlopened modules, though!
+ dlopenmodule=
+ for dlpremoduletest in $dlprefiles; do
+ if test "X$dlpremoduletest" = "X$lib"; then
+ dlopenmodule=$dlpremoduletest
+ break
+ fi
+ done
+ if test -z "$dlopenmodule" && test yes = "$shouldnotlink" && test link = "$pass"; then
+ echo
+ if test prog = "$linkmode"; then
+ func_warning "Linking the executable $output against the loadable module"
+ else
+ func_warning "Linking the shared library $output against the loadable module"
+ fi
+ func_warning "$linklib is not portable!"
+ fi
+ if test lib = "$linkmode" &&
+ test yes = "$hardcode_into_libs"; then
+ # Hardcode the library path.
+ # Skip directories that are in the system default run-time
+ # search path.
+ case " $sys_lib_dlsearch_path " in
+ *" $absdir "*) ;;
+ *)
+ case "$compile_rpath " in
+ *" $absdir "*) ;;
+ *) func_append compile_rpath " $absdir" ;;
+ esac
+ ;;
+ esac
+ case " $sys_lib_dlsearch_path " in
+ *" $libdir "*) ;;
+ *)
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ ;;
+ esac
+ fi
+
+ if test -n "$old_archive_from_expsyms_cmds"; then
+ # figure out the soname
+ set dummy $library_names
+ shift
+ realname=$1
+ shift
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ # Use the dlname if we got it; it's perfectly good, no?
+ if test -n "$dlname"; then
+ soname=$dlname
+ elif test -n "$soname_spec"; then
+ # bleh windows
+ case $host_os in
+ cygwin* | mingw* | windows* | cegcc* | os2*)
+ func_arith $current - $age
+ major=$func_arith_result
+ versuffix=-$major
+ ;;
+ esac
+ eval soname=\"$soname_spec\"
+ else
+ soname=$realname
+ fi
+
+ # Make a new name for the extract_expsyms_cmds to use
+ soroot=$soname
+ func_basename "$soroot"
+ soname=$func_basename_result
+ func_stripname 'lib' '.dll' "$soname"
+ newlib=libimp-$func_stripname_result.a
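+ # (For a hypothetical soname 'libfoo-1.dll' this strips the 'lib'
+ # prefix and '.dll' suffix, giving newlib=libimp-foo-1.a.)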
+
+ # If the library has no export list, then create one now
+ if test -f "$output_objdir/$soname-def"; then :
+ else
+ func_verbose "extracting exported symbol list from '$soname'"
+ func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+ fi
+
+ # Create $newlib
+ if test -f "$output_objdir/$newlib"; then :; else
+ func_verbose "generating import library for '$soname'"
+ func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+ fi
+ # make sure the library variables are pointing to the new library
+ dir=$output_objdir
+ linklib=$newlib
+ fi # test -n "$old_archive_from_expsyms_cmds"
+
+ if test prog = "$linkmode" || test relink != "$opt_mode"; then
+ add_shlibpath=
+ add_dir=
+ add=
+ lib_linked=yes
+ case $hardcode_action in
+ immediate | unsupported)
+ if test no = "$hardcode_direct"; then
+ add=$dir/$linklib
+ case $host in
+ *-*-sco3.2v5.0.[024]*) add_dir=-L$dir ;;
+ *-*-sysv4*uw2*) add_dir=-L$dir ;;
+ *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+ *-*-unixware7*) add_dir=-L$dir ;;
+ *-*-darwin* )
+ # if the lib is a (non-dlopened) module then we cannot
+ # link against it; someone is ignoring the earlier warnings
+ if /usr/bin/file -L $add 2> /dev/null |
+ $GREP ": [^:]* bundle" >/dev/null; then
+ if test "X$dlopenmodule" != "X$lib"; then
+ func_warning "lib $linklib is a module, not a shared library"
+ if test -z "$old_library"; then
+ func_warning "And there doesn't seem to be a static archive available"
+ func_warning "The link will probably fail, sorry"
+ else
+ add=$dir/$old_library
+ fi
+ elif test -n "$old_library"; then
+ add=$dir/$old_library
+ fi
+ fi
+ esac
+ elif test no = "$hardcode_minus_L"; then
+ case $host in
+ *-*-sunos*) add_shlibpath=$dir ;;
+ esac
+ add_dir=-L$dir
+ add=-l$name
+ elif test no = "$hardcode_shlibpath_var"; then
+ add_shlibpath=$dir
+ add=-l$name
+ else
+ lib_linked=no
+ fi
+ ;;
+ relink)
+ if test yes = "$hardcode_direct" &&
+ test no = "$hardcode_direct_absolute"; then
+ add=$dir/$linklib
+ elif test yes = "$hardcode_minus_L"; then
+ add_dir=-L$absdir
+ # Try looking first in the location we're being installed to.
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
+ [\\/]*)
+ func_append add_dir " -L$inst_prefix_dir$libdir"
+ ;;
+ esac
+ fi
+ add=-l$name
+ elif test yes = "$hardcode_shlibpath_var"; then
+ add_shlibpath=$dir
+ add=-l$name
+ else
+ lib_linked=no
+ fi
+ ;;
+ *) lib_linked=no ;;
+ esac
+
+ if test yes != "$lib_linked"; then
+ func_fatal_configuration "unsupported hardcode properties"
+ fi
+
+ if test -n "$add_shlibpath"; then
+ case :$compile_shlibpath: in
+ *":$add_shlibpath:"*) ;;
+ *) func_append compile_shlibpath "$add_shlibpath:" ;;
+ esac
+ fi
+ if test prog = "$linkmode"; then
+ test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+ test -n "$add" && compile_deplibs="$add $compile_deplibs"
+ else
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ if test yes != "$hardcode_direct" &&
+ test yes != "$hardcode_minus_L" &&
+ test yes = "$hardcode_shlibpath_var"; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+ *) func_append finalize_shlibpath "$libdir:" ;;
+ esac
+ fi
+ fi
+ fi
+
+ if test prog = "$linkmode" || test relink = "$opt_mode"; then
+ add_shlibpath=
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+ if test yes = "$hardcode_direct" &&
+ test no = "$hardcode_direct_absolute"; then
+ add=$libdir/$linklib
+ elif test yes = "$hardcode_minus_L"; then
+ add_dir=-L$libdir
+ add=-l$name
+ elif test yes = "$hardcode_shlibpath_var"; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+ *) func_append finalize_shlibpath "$libdir:" ;;
+ esac
+ add=-l$name
+ elif test yes = "$hardcode_automatic"; then
+ if test -n "$inst_prefix_dir" &&
+ test -f "$inst_prefix_dir$libdir/$linklib"; then
+ add=$inst_prefix_dir$libdir/$linklib
+ else
+ add=$libdir/$linklib
+ fi
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+ add_dir=-L$libdir
+ # Try looking first in the location we're being installed to.
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
+ [\\/]*)
+ func_append add_dir " -L$inst_prefix_dir$libdir"
+ ;;
+ esac
+ fi
+ add=-l$name
+ fi
+
+ if test prog = "$linkmode"; then
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+ fi
+ elif test prog = "$linkmode"; then
+ # Here we assume that one of hardcode_direct or hardcode_minus_L
+ # is not unsupported. This is valid on all known static and
+ # shared platforms.
+ if test unsupported != "$hardcode_direct"; then
+ test -n "$old_library" && linklib=$old_library
+ compile_deplibs="$dir/$linklib $compile_deplibs"
+ finalize_deplibs="$dir/$linklib $finalize_deplibs"
+ else
+ compile_deplibs="-l$name -L$dir $compile_deplibs"
+ finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+ fi
+ elif test yes = "$build_libtool_libs"; then
+ # Not a shared library
+ if test pass_all != "$deplibs_check_method"; then
+ # We're trying to link a shared library against a static one,
+ # but the system doesn't support it.
+
+ # Just print a warning and add the library to dependency_libs so
+ # that the program can be linked against the static library.
+ func_warning "This system cannot link to static lib archive $lib."
+ func_warning "I have the capability to make that library automatically link in when"
+ func_warning "you link to this library. But I can only do this if you have a"
+ func_warning "shared version of the library, which you do not appear to have."
+ if test yes = "$module"; then
+ func_warning "But as you try to build a module library, libtool will still create"
+ func_warning "a static module that should work as long as the dlopening application"
+ func_warning "is linked with the -dlopen flag to resolve symbols at runtime."
+ if test -z "$global_symbol_pipe"; then
+ func_warning "However, this would only work if libtool was able to extract symbol"
+ func_warning "lists from a program, using 'nm' or equivalent, but libtool could"
+ func_warning "not find such a program. So, this module is probably useless."
+ func_warning "'nm' from GNU binutils and a full rebuild may help."
+ fi
+ if test no = "$build_old_libs"; then
+ build_libtool_libs=module
+ build_old_libs=yes
+ else
+ build_libtool_libs=no
+ fi
+ fi
+ else
+ deplibs="$dir/$old_library $deplibs"
+ link_static=yes
+ fi
+ fi # link shared/static library?
+
+ if test lib = "$linkmode"; then
+ if test -n "$dependency_libs" &&
+ { test yes != "$hardcode_into_libs" ||
+ test yes = "$build_old_libs" ||
+ test yes = "$link_static"; }; then
+ # Extract -R from dependency_libs
+ temp_deplibs=
+ for libdir in $dependency_libs; do
+ case $libdir in
+ -R*) func_stripname '-R' '' "$libdir"
+ temp_xrpath=$func_stripname_result
+ case " $xrpath " in
+ *" $temp_xrpath "*) ;;
+ *) func_append xrpath " $temp_xrpath";;
+ esac;;
+ *) func_append temp_deplibs " $libdir";;
+ esac
+ done
+ dependency_libs=$temp_deplibs
+ fi
+
+ func_append newlib_search_path " $absdir"
+ # Link against this library
+ test no = "$link_static" && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+ # ... and its dependency_libs
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ newdependency_libs="$deplib $newdependency_libs"
+ case $deplib in
+ -L*) func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result";;
+ *) func_resolve_sysroot "$deplib" ;;
+ esac
+ if $opt_preserve_dup_deps; then
+ case "$tmp_libs " in
+ *" $func_resolve_sysroot_result "*)
+ func_append specialdeplibs " $func_resolve_sysroot_result" ;;
+ esac
+ fi
+ func_append tmp_libs " $func_resolve_sysroot_result"
+ done
+
+ if test no != "$link_all_deplibs"; then
+ # Add the search paths of all dependency libraries
+ for deplib in $dependency_libs; do
+ path=
+ case $deplib in
+ -L*) path=$deplib ;;
+ *.la)
+ func_resolve_sysroot "$deplib"
+ deplib=$func_resolve_sysroot_result
+ func_dirname "$deplib" "" "."
+ dir=$func_dirname_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) absdir=$dir ;;
+ *)
+ absdir=`cd "$dir" && pwd`
+ if test -z "$absdir"; then
+ func_warning "cannot determine absolute directory name of '$dir'"
+ absdir=$dir
+ fi
+ ;;
+ esac
+ if $GREP "^installed=no" $deplib > /dev/null; then
+ case $host in
+ *-*-darwin*)
+ depdepl=
+ eval deplibrary_names=`$SED -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+ if test -n "$deplibrary_names"; then
+ for tmp in $deplibrary_names; do
+ depdepl=$tmp
+ done
+ if test -f "$absdir/$objdir/$depdepl"; then
+ depdepl=$absdir/$objdir/$depdepl
+ darwin_install_name=`$OTOOL -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+ if test -z "$darwin_install_name"; then
+ darwin_install_name=`$OTOOL64 -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+ fi
+ func_append compiler_flags " $wl-dylib_file $wl$darwin_install_name:$depdepl"
+ func_append linker_flags " -dylib_file $darwin_install_name:$depdepl"
+ path=
+ fi
+ fi
+ ;;
+ *)
+ path=-L$absdir/$objdir
+ ;;
+ esac
+ else
+ eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+ test -z "$libdir" && \
+ func_fatal_error "'$deplib' is not a valid libtool archive"
+ test "$absdir" != "$libdir" && \
+ func_warning "'$deplib' seems to be moved"
+
+ path=-L$absdir
+ fi
+ ;;
+ esac
+ case " $deplibs " in
+ *" $path "*) ;;
+ *) deplibs="$path $deplibs" ;;
+ esac
+ done
+ fi # link_all_deplibs != no
+ fi # linkmode = lib
+ done # for deplib in $libs
+ if test link = "$pass"; then
+ if test prog = "$linkmode"; then
+ compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+ finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+ else
+ compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ fi
+ fi
+ dependency_libs=$newdependency_libs
+ if test dlpreopen = "$pass"; then
+ # Link the dlpreopened libraries before other libraries
+ for deplib in $save_deplibs; do
+ deplibs="$deplib $deplibs"
+ done
+ fi
+ if test dlopen != "$pass"; then
+ test conv = "$pass" || {
+ # Make sure lib_search_path contains only unique directories.
+ lib_search_path=
+ for dir in $newlib_search_path; do
+ case "$lib_search_path " in
+ *" $dir "*) ;;
+ *) func_append lib_search_path " $dir" ;;
+ esac
+ done
+ newlib_search_path=
+ }
+
+ if test prog,link = "$linkmode,$pass"; then
+ vars="compile_deplibs finalize_deplibs"
+ else
+ vars=deplibs
+ fi
+ for var in $vars dependency_libs; do
+ # Add libraries to $var in reverse order
+ eval tmp_libs=\"\$$var\"
+ new_libs=
+ for deplib in $tmp_libs; do
+ # FIXME: Pedantically, this is the right thing to do, so
+ # that some nasty dependency loop isn't accidentally
+ # broken:
+ #new_libs="$deplib $new_libs"
+ # Pragmatically, this seems to cause very few problems in
+ # practice:
+ case $deplib in
+ -L*) new_libs="$deplib $new_libs" ;;
+ -R*) ;;
+ *)
+ # And here is the reason: when a library appears more
+ # than once as an explicit dependence of a library, or
+ # is implicitly linked in more than once by the
+ # compiler, it is considered special, and multiple
+ # occurrences thereof are not removed. Compare this
+ # with having the same library being listed as a
+ # dependency of multiple other libraries: in this case,
+ # we know (pedantically, we assume) the library does not
+ # need to be listed more than once, so we keep only the
+ # last copy. This is not always right, but it is rare
+ # enough that we require users that really mean to play
+ # such unportable linking tricks to link the library
+ # using -Wl,-lname, so that libtool does not consider it
+ # for duplicate removal.
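+ # For example, a hypothetical libA.la listed twice on the same link
+ # line (and therefore recorded in $specialdeplibs) keeps both copies
+ # here, while one that is only pulled in through the dependency_libs
+ # of several other libraries is reduced to a single copy.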
+ case " $specialdeplibs " in
+ *" $deplib "*) new_libs="$deplib $new_libs" ;;
+ *)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+ *) new_libs="$deplib $new_libs" ;;
+ esac
+ ;;
+ esac
+ ;;
+ esac
+ done
+ tmp_libs=
+ for deplib in $new_libs; do
+ case $deplib in
+ -L*)
+ case " $tmp_libs " in
+ *" $deplib "*) ;;
+ *) func_append tmp_libs " $deplib" ;;
+ esac
+ ;;
+ *) func_append tmp_libs " $deplib" ;;
+ esac
+ done
+ eval $var=\"$tmp_libs\"
+ done # for var
+ fi
+
+ # Add Sun CC postdeps if required:
+ test CXX = "$tagname" && {
+ case $host_os in
+ linux*)
+ case `$CC -V 2>&1 | $SED 5q` in
+ *Sun\ C*) # Sun C++ 5.9
+ func_suncc_cstd_abi
+
+ if test no != "$suncc_use_cstd_abi"; then
+ func_append postdeps ' -library=Cstd -library=Crun'
+ fi
+ ;;
+ esac
+ ;;
+
+ solaris*)
+ func_cc_basename "$CC"
+ case $func_cc_basename_result in
+ CC* | sunCC*)
+ func_suncc_cstd_abi
+
+ if test no != "$suncc_use_cstd_abi"; then
+ func_append postdeps ' -library=Cstd -library=Crun'
+ fi
+ ;;
+ esac
+ ;;
+ esac
+ }
+
+ # Last step: remove runtime libs from dependency_libs
+ # (they stay in deplibs)
+ tmp_libs=
+ for i in $dependency_libs; do
+ case " $predeps $postdeps $compiler_lib_search_path " in
+ *" $i "*)
+ i=
+ ;;
+ esac
+ if test -n "$i"; then
+ func_append tmp_libs " $i"
+ fi
+ done
+ dependency_libs=$tmp_libs
+ done # for pass
+ if test prog = "$linkmode"; then
+ dlfiles=$newdlfiles
+ fi
+ if test prog = "$linkmode" || test lib = "$linkmode"; then
+ dlprefiles=$newdlprefiles
+ fi
+
+ case $linkmode in
+ oldlib)
+ if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
+ func_warning "'-dlopen' is ignored for archives"
+ fi
+
+ case " $deplibs" in
+ *\ -l* | *\ -L*)
+ func_warning "'-l' and '-L' are ignored for archives" ;;
+ esac
+
+ test -n "$rpath" && \
+ func_warning "'-rpath' is ignored for archives"
+
+ test -n "$xrpath" && \
+ func_warning "'-R' is ignored for archives"
+
+ test -n "$vinfo" && \
+ func_warning "'-version-info/-version-number' is ignored for archives"
+
+ test -n "$release" && \
+ func_warning "'-release' is ignored for archives"
+
+ test -n "$export_symbols$export_symbols_regex" && \
+ func_warning "'-export-symbols' is ignored for archives"
+
+ # Now set the variables for building old libraries.
+ build_libtool_libs=no
+ oldlibs=$output
+ func_append objs "$old_deplibs"
+ ;;
+
+ lib)
+ # Make sure we only generate libraries of the form 'libNAME.la'.
+ case $outputname in
+ lib*)
+ func_stripname 'lib' '.la' "$outputname"
+ name=$func_stripname_result
+ eval shared_ext=\"$shrext_cmds\"
+ eval libname=\"$libname_spec\"
+ ;;
+ *)
+ test no = "$module" \
+ && func_fatal_help "libtool library '$output' must begin with 'lib'"
+
+ if test no != "$need_lib_prefix"; then
+ # Add the "lib" prefix for modules if required
+ func_stripname '' '.la' "$outputname"
+ name=$func_stripname_result
+ eval shared_ext=\"$shrext_cmds\"
+ eval libname=\"$libname_spec\"
+ else
+ func_stripname '' '.la' "$outputname"
+ libname=$func_stripname_result
+ fi
+ ;;
+ esac
+
+ if test -n "$objs"; then
+ if test pass_all != "$deplibs_check_method"; then
+ func_fatal_error "cannot build libtool library '$output' from non-libtool objects on this host:$objs"
+ else
+ func_warning "Linking the shared library $output against the non-libtool objects $objs is not portable!"
+ func_append libobjs " $objs"
+ fi
+ fi
+
+ test no = "$dlself" \
+ || func_warning "'-dlopen self' is ignored for libtool libraries"
+
+ set dummy $rpath
+ shift
+ test 1 -lt "$#" \
+ && func_warning "ignoring multiple '-rpath's for a libtool library"
+
+ install_libdir=$1
+
+ oldlibs=
+ if test -z "$rpath"; then
+ if test yes = "$build_libtool_libs"; then
+ # Building a libtool convenience library.
+ # Some compilers have problems with a '.al' extension so
+ # convenience libraries should have the same extension that an
+ # archive normally would.
+ oldlibs="$output_objdir/$libname.$libext $oldlibs"
+ build_libtool_libs=convenience
+ build_old_libs=yes
+ fi
+
+ test -n "$vinfo" && \
+ func_warning "'-version-info/-version-number' is ignored for convenience libraries"
+
+ test -n "$release" && \
+ func_warning "'-release' is ignored for convenience libraries"
+ else
+
+ # Parse the version information argument.
+ save_ifs=$IFS; IFS=:
+ set dummy $vinfo 0 0 0
+ shift
+ IFS=$save_ifs
+
+ test -n "$7" && \
+ func_fatal_help "too many parameters to '-version-info'"
+
+ # convert absolute version numbers to libtool ages
+ # this retains compatibility with .la files and attempts
+ # to make the code below a bit more comprehensible
+
+ case $vinfo_number in
+ yes)
+ number_major=$1
+ number_minor=$2
+ number_revision=$3
+ #
+ # There are really only two kinds -- those that
+ # use the current revision as the major version
+ # and those that subtract age and use age as
+ # a minor version. But, then there is irix
+ # that has an extra 1 added just for fun
+ #
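+ # For example, with version_type=linux a hypothetical
+ # '-version-number 3:2:1' becomes current=3+2=5, age=2, revision=1
+ # here, and the linux branch of the version calculation further
+ # below then produces major=.3 and versuffix=.3.2.1
+ # (e.g. libfoo.so.3.2.1).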
+ case $version_type in
+ # correct linux to gnu/linux during the next big refactor
+ darwin|freebsd-elf|linux|midnightbsd-elf|osf|qnx|windows|none)
+ func_arith $number_major + $number_minor
+ current=$func_arith_result
+ age=$number_minor
+ revision=$number_revision
+ ;;
+ freebsd-aout|sco|sunos)
+ current=$number_major
+ revision=$number_minor
+ age=0
+ ;;
+ irix|nonstopux)
+ func_arith $number_major + $number_minor
+ current=$func_arith_result
+ age=$number_minor
+ revision=$number_minor
+ lt_irix_increment=no
+ ;;
+ esac
+ ;;
+ no)
+ current=$1
+ revision=$2
+ age=$3
+ ;;
+ esac
+
+ # Check that each of these is a valid number.
+ case $current in
+ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+ *)
+ func_error "CURRENT '$current' must be a nonnegative integer"
+ func_fatal_error "'$vinfo' is not valid version information"
+ ;;
+ esac
+
+ case $revision in
+ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+ *)
+ func_error "REVISION '$revision' must be a nonnegative integer"
+ func_fatal_error "'$vinfo' is not valid version information"
+ ;;
+ esac
+
+ case $age in
+ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+ *)
+ func_error "AGE '$age' must be a nonnegative integer"
+ func_fatal_error "'$vinfo' is not valid version information"
+ ;;
+ esac
+
+ if test "$age" -gt "$current"; then
+ func_error "AGE '$age' is greater than the current interface number '$current'"
+ func_fatal_error "'$vinfo' is not valid version information"
+ fi
+
+ # Calculate the version variables.
+ major=
+ versuffix=
+ verstring=
+ case $version_type in
+ none) ;;
+
+ darwin)
+ # Like Linux, but with the current version available in
+ # verstring for coding it into the library header
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix=$major.$age.$revision
+ # Darwin ld doesn't like 0 for these options...
+ func_arith $current + 1
+ minor_current=$func_arith_result
+ xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision"
+ verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+ # On Darwin, other compilers (e.g. nagfor) may need the flags passed via $wl.
+ case $CC in
+ nagfor*)
+ verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision"
+ ;;
+ *)
+ verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+ ;;
+ esac
+ ;;
+
+ freebsd-aout)
+ major=.$current
+ versuffix=.$current.$revision
+ ;;
+
+ freebsd-elf | midnightbsd-elf)
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix=$major.$age.$revision
+ ;;
+
+ irix | nonstopux)
+ if test no = "$lt_irix_increment"; then
+ func_arith $current - $age
+ else
+ func_arith $current - $age + 1
+ fi
+ major=$func_arith_result
+
+ case $version_type in
+ nonstopux) verstring_prefix=nonstopux ;;
+ *) verstring_prefix=sgi ;;
+ esac
+ verstring=$verstring_prefix$major.$revision
+
+ # Add in all the interfaces that we are compatible with.
+ loop=$revision
+ while test 0 -ne "$loop"; do
+ func_arith $revision - $loop
+ iface=$func_arith_result
+ func_arith $loop - 1
+ loop=$func_arith_result
+ verstring=$verstring_prefix$major.$iface:$verstring
+ done
+
+ # Before this point, $major must not contain '.'.
+ major=.$major
+ versuffix=$major.$revision
+ ;;
+
+ linux) # correct to gnu/linux during the next big refactor
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix=$major.$age.$revision
+ ;;
+
+ osf)
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix=.$current.$age.$revision
+ verstring=$current.$age.$revision
+
+ # Add in all the interfaces that we are compatible with.
+ loop=$age
+ while test 0 -ne "$loop"; do
+ func_arith $current - $loop
+ iface=$func_arith_result
+ func_arith $loop - 1
+ loop=$func_arith_result
+ verstring=$verstring:$iface.0
+ done
+
+ # Make executables depend on our current version.
+ func_append verstring ":$current.0"
+ ;;
+
+ qnx)
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix=$major.$age.$revision
+ ;;
+
+ sco)
+ major=.$current
+ versuffix=.$current
+ ;;
+
+ sunos)
+ major=.$current
+ versuffix=.$current.$revision
+ ;;
+
+ windows)
+ # Use '-' rather than '.', since we only want one
+ # extension on DOS 8.3 file systems.
+ func_arith $current - $age
+ major=$func_arith_result
+ versuffix=-$major
+ ;;
+
+ *)
+ func_fatal_configuration "unknown library version type '$version_type'"
+ ;;
+ esac
+
+ # Clear the version info if we defaulted, and they specified a release.
+ if test -z "$vinfo" && test -n "$release"; then
+ major=
+ case $version_type in
+ darwin)
+ # we can't check for "0.0" in archive_cmds due to quoting
+ # problems, so we reset it completely
+ verstring=
+ ;;
+ *)
+ verstring=0.0
+ ;;
+ esac
+ if test no = "$need_version"; then
+ versuffix=
+ else
+ versuffix=.0.0
+ fi
+ fi
+
+ # Remove version info from name if versioning should be avoided
+ if test yes,no = "$avoid_version,$need_version"; then
+ major=
+ versuffix=
+ verstring=
+ fi
+
+ # Check to see if the archive will have undefined symbols.
+ if test yes = "$allow_undefined"; then
+ if test unsupported = "$allow_undefined_flag"; then
+ if test yes = "$build_old_libs"; then
+ func_warning "undefined symbols not allowed in $host shared libraries; building static only"
+ build_libtool_libs=no
+ else
+ func_fatal_error "can't build $host shared library unless -no-undefined is specified"
+ fi
+ fi
+ else
+ # Don't allow undefined symbols.
+ allow_undefined_flag=$no_undefined_flag
+ fi
+
+ fi
+
+ func_generate_dlsyms "$libname" "$libname" :
+ func_append libobjs " $symfileobj"
+ test " " = "$libobjs" && libobjs=
+
+ if test relink != "$opt_mode"; then
+ # Remove our outputs, but don't remove object files since they
+ # may have been created when compiling PIC objects.
+ removelist=
+ tempremovelist=`$ECHO "$output_objdir/*"`
+ for p in $tempremovelist; do
+ case $p in
+ *.$objext | *.gcno)
+ ;;
+ $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/$libname$release.*)
+ if test -n "$precious_files_regex"; then
+ if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+ then
+ continue
+ fi
+ fi
+ func_append removelist " $p"
+ ;;
+ *) ;;
+ esac
+ done
+ test -n "$removelist" && \
+ func_show_eval "${RM}r \$removelist"
+ fi
+
+ # Now set the variables for building old libraries.
+ if test yes = "$build_old_libs" && test convenience != "$build_libtool_libs"; then
+ func_append oldlibs " $output_objdir/$libname.$libext"
+
+ # Transform .lo files to .o files.
+ oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; $lo2o" | $NL2SP`
+ fi
+
+ # Eliminate all temporary directories.
+ #for path in $notinst_path; do
+ # lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+ # deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+ # dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+ #done
+
+ if test -n "$xrpath"; then
+ # If the user specified any rpath flags, then add them.
+ temp_xrpath=
+ for libdir in $xrpath; do
+ func_replace_sysroot "$libdir"
+ func_append temp_xrpath " -R$func_replace_sysroot_result"
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ done
+ if test yes != "$hardcode_into_libs" || test yes = "$build_old_libs"; then
+ dependency_libs="$temp_xrpath $dependency_libs"
+ fi
+ fi
+
+ # Make sure dlfiles contains only unique files that won't be dlpreopened
+ old_dlfiles=$dlfiles
+ dlfiles=
+ for lib in $old_dlfiles; do
+ case " $dlprefiles $dlfiles " in
+ *" $lib "*) ;;
+ *) func_append dlfiles " $lib" ;;
+ esac
+ done
+
+ # Make sure dlprefiles contains only unique files
+ old_dlprefiles=$dlprefiles
+ dlprefiles=
+ for lib in $old_dlprefiles; do
+ case "$dlprefiles " in
+ *" $lib "*) ;;
+ *) func_append dlprefiles " $lib" ;;
+ esac
+ done
+
+ if test yes = "$build_libtool_libs"; then
+ if test -n "$rpath"; then
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-windows* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+ # these systems don't actually have a C library (as such)!
+ ;;
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # Rhapsody C library is in the System framework
+ func_append deplibs " System.ltframework"
+ ;;
+ *-*-netbsd*)
+ # Don't link with libc until the a.out ld.so is fixed.
+ ;;
+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-midnightbsd*)
+ # Do not include libc due to us having libc/libc_r.
+ ;;
+ *-*-sco3.2v5* | *-*-sco5v6*)
+ # Causes problems with __ctype
+ ;;
+ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+ # Compiler inserts libc in the correct place for threads to work
+ ;;
+ *)
+ # Add libc to deplibs on all other systems if necessary.
+ if test yes = "$build_libtool_need_lc"; then
+ func_append deplibs " -lc"
+ fi
+ ;;
+ esac
+ fi
+
+ # Transform deplibs into only deplibs that can be linked in shared.
+ name_save=$name
+ libname_save=$libname
+ release_save=$release
+ versuffix_save=$versuffix
+ major_save=$major
+ # I'm not sure if I'm treating the release correctly. I think
+ # release should show up in the -l (i.e. -lgmp5) so we don't want to
+ # add it in twice. Is that correct?
+ release=
+ versuffix=
+ major=
+ newdeplibs=
+ droppeddeps=no
+ case $deplibs_check_method in
+ pass_all)
+ # Don't check for shared/static. Everything works.
+ # This might be a little naive. We might want to check
+ # whether the library exists or not. But this is on
+ # osf3 & osf4 and I'm not really sure... Just
+ # implementing what was already the behavior.
+ newdeplibs=$deplibs
+ ;;
+ file_magic*)
+ set dummy $deplibs_check_method; shift
+ file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+ for a_deplib in $deplibs; do
+ case $a_deplib in
+ -l*)
+ func_stripname -l '' "$a_deplib"
+ name=$func_stripname_result
+ if test yes = "$allow_libtool_libs_with_static_runtimes"; then
+ case " $predeps $postdeps " in
+ *" $a_deplib "*)
+ func_append newdeplibs " $a_deplib"
+ a_deplib=
+ ;;
+ esac
+ fi
+ if test -n "$a_deplib"; then
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ if test -n "$file_magic_glob"; then
+ libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+ else
+ libnameglob=$libname
+ fi
+ test yes = "$want_nocaseglob" && nocaseglob=`shopt -p nocaseglob`
+ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+ if test yes = "$want_nocaseglob"; then
+ shopt -s nocaseglob
+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+ $nocaseglob
+ else
+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+ fi
+ for potent_lib in $potential_libs; do
+ # Follow soft links.
+ if ls -lLd "$potent_lib" 2>/dev/null |
+ $GREP " -> " >/dev/null; then
+ continue
+ fi
+ # The statement above tries to avoid entering an
+ # endless loop below, in case of cyclic links.
+ # We might still enter an endless loop, since a link
+ # loop can be closed while we follow links,
+ # but so what?
+ potlib=$potent_lib
+ while test -h "$potlib" 2>/dev/null; do
+ potliblink=`ls -ld $potlib | $SED 's/.* -> //'`
+ case $potliblink in
+ [\\/]* | [A-Za-z]:[\\/]*) potlib=$potliblink;;
+ *) potlib=`$ECHO "$potlib" | $SED 's|[^/]*$||'`"$potliblink";;
+ esac
+ done
+ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+ $SED -e 10q |
+ $EGREP "$file_magic_regex" > /dev/null; then
+ func_append newdeplibs " $a_deplib"
+ a_deplib=
+ break 2
+ fi
+ done
+ done
+ fi
+ if test -n "$a_deplib"; then
+ droppeddeps=yes
+ func_warning "Linker path does not have a real file for library $a_deplib."
+ func_warning "I have the capability to make that library automatically link in when"
+ func_warning "you link to this library. But I can only do this if you have a"
+ func_warning "shared version of the library, which you do not appear to have"
+ func_warning "because I did check the linker path looking for a file starting"
+ if test -z "$potlib"; then
+ func_warning "with $libname but no candidates were found. (...for file magic test)"
+ else
+ func_warning "with $libname and none of the candidates passed a file format test"
+ func_warning "using file magic. Last file checked: $potlib"
+ fi
+ fi
+ ;;
+ *)
+ # Add a -L argument.
+ func_append newdeplibs " $a_deplib"
+ ;;
+ esac
+ done # Gone through all deplibs.
+ ;;
+ match_pattern*)
+ set dummy $deplibs_check_method; shift
+ match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+ for a_deplib in $deplibs; do
+ case $a_deplib in
+ -l*)
+ func_stripname -l '' "$a_deplib"
+ name=$func_stripname_result
+ if test yes = "$allow_libtool_libs_with_static_runtimes"; then
+ case " $predeps $postdeps " in
+ *" $a_deplib "*)
+ func_append newdeplibs " $a_deplib"
+ a_deplib=
+ ;;
+ esac
+ fi
+ if test -n "$a_deplib"; then
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+ potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+ for potent_lib in $potential_libs; do
+ potlib=$potent_lib # see symlink-check above in file_magic test
+ if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+ $EGREP "$match_pattern_regex" > /dev/null; then
+ func_append newdeplibs " $a_deplib"
+ a_deplib=
+ break 2
+ fi
+ done
+ done
+ fi
+ if test -n "$a_deplib"; then
+ droppeddeps=yes
+ func_warning "Linker path does not have a real file for library $a_deplib."
+ func_warning "I have the capability to make that library automatically link in when"
+ func_warning "you link to this library. But I can only do this if you have a"
+ func_warning "shared version of the library, which you do not appear to have"
+ func_warning "because I did check the linker path looking for a file starting"
+ if test -z "$potlib"; then
+ func_warning "with $libname but no candidates were found. (...for regex pattern test)"
+ else
+ func_warning "with $libname and none of the candidates passed a file format test"
+ func_warning "using a regex pattern. Last file checked: $potlib"
+ fi
+ fi
+ ;;
+ *)
+ # Add a -L argument.
+ func_append newdeplibs " $a_deplib"
+ ;;
+ esac
+ done # Gone through all deplibs.
+ ;;
+ none | unknown | *)
+ newdeplibs=
+ tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+ if test yes = "$allow_libtool_libs_with_static_runtimes"; then
+ for i in $predeps $postdeps; do
+ # can't use Xsed below, because $i might contain '/'
+ tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s|$i||"`
+ done
+ fi
+ case $tmp_deplibs in
+ *[!\ \ ]*)
+ echo
+ if test none = "$deplibs_check_method"; then
+ func_warning "Inter-library dependencies are not supported on this platform."
+ else
+ func_warning "Inter-library dependencies are not known to be supported."
+ fi
+ func_warning "All declared inter-library dependencies are being dropped."
+ droppeddeps=yes
+ ;;
+ esac
+ ;;
+ esac
+ versuffix=$versuffix_save
+ major=$major_save
+ release=$release_save
+ libname=$libname_save
+ name=$name_save
+
+ case $host in
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # On Rhapsody replace the C library with the System framework
+ newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+ ;;
+ esac
+
+ if test yes = "$droppeddeps"; then
+ if test yes = "$module"; then
+ func_warning "libtool could not satisfy all declared inter-library"
+ func_warning "dependencies of module $libname. Therefore, libtool will create"
+ func_warning "a static module that should work as long as the dlopening"
+ func_warning "application is linked with the -dlopen flag."
+ if test -z "$global_symbol_pipe"; then
+ func_warning "However, this would only work if libtool was able to extract symbol"
+ func_warning "lists from a program, using 'nm' or equivalent, but libtool could"
+ func_warning "not find such a program. So, this module is probably useless."
+ func_warning "'nm' from GNU binutils and a full rebuild may help."
+ fi
+ if test no = "$build_old_libs"; then
+ oldlibs=$output_objdir/$libname.$libext
+ build_libtool_libs=module
+ build_old_libs=yes
+ else
+ build_libtool_libs=no
+ fi
+ else
+ echo "*** The inter-library dependencies that have been dropped here will be"
+ echo "*** automatically added whenever a program is linked with this library"
+ echo "*** or is declared to -dlopen it."
+
+ if test no = "$allow_undefined"; then
+ echo
+ echo "*** Since this library must not contain undefined symbols,"
+ echo "*** because either the platform does not support them or"
+ echo "*** it was explicitly requested with -no-undefined,"
+ echo "*** libtool will only create a static version of it."
+ if test no = "$build_old_libs"; then
+ oldlibs=$output_objdir/$libname.$libext
+ build_libtool_libs=module
+ build_old_libs=yes
+ else
+ build_libtool_libs=no
+ fi
+ fi
+ fi
+ fi
+ # Done checking deplibs!
+ deplibs=$newdeplibs
+ fi
+ # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+ case $host in
+ *-*-darwin*)
+ newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ ;;
+ esac
+
+ # move library search paths that coincide with paths to not yet
+ # installed libraries to the beginning of the library search list
+ new_libs=
+ for path in $notinst_path; do
+ case " $new_libs " in
+ *" -L$path/$objdir "*) ;;
+ *)
+ case " $deplibs " in
+ *" -L$path/$objdir "*)
+ func_append new_libs " -L$path/$objdir" ;;
+ esac
+ ;;
+ esac
+ done
+ for deplib in $deplibs; do
+ case $deplib in
+ -L*)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ done
+ deplibs=$new_libs
+
+ # All the library-specific variables (install_libdir is set above).
+ library_names=
+ old_library=
+ dlname=
+
+ # Test again, we may have decided not to build it any more
+ if test yes = "$build_libtool_libs"; then
+ # Remove $wl instances when linking with ld.
+ # FIXME: should test the right _cmds variable.
+ case $archive_cmds in
+ *\$LD\ *) wl= ;;
+ esac
+ if test yes = "$hardcode_into_libs"; then
+ # Hardcode the library paths
+ hardcode_libdirs=
+ dep_rpath=
+ rpath=$finalize_rpath
+ test relink = "$opt_mode" || rpath=$compile_rpath$rpath
+ for libdir in $rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
+ func_replace_sysroot "$libdir"
+ libdir=$func_replace_sysroot_result
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs=$libdir
+ else
+ # Just accumulate the unique libdirs.
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+ eval flag=\"$hardcode_libdir_flag_spec\"
+ func_append dep_rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+ *" $libdir "*) ;;
+ *) func_append perm_rpath " $libdir" ;;
+ esac
+ fi
+ done
+ # Substitute the hardcoded libdirs into the rpath.
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir=$hardcode_libdirs
+ eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
+ fi
+ if test -n "$runpath_var" && test -n "$perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $perm_rpath; do
+ func_append rpath "$dir:"
+ done
+ eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+ fi
+ test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+ fi
+
+ shlibpath=$finalize_shlibpath
+ test relink = "$opt_mode" || shlibpath=$compile_shlibpath$shlibpath
+ if test -n "$shlibpath"; then
+ eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+ fi
+
+ # Get the real and link names of the library.
+ eval shared_ext=\"$shrext_cmds\"
+ eval library_names=\"$library_names_spec\"
+ set dummy $library_names
+ shift
+ realname=$1
+ shift
+
+ if test -n "$soname_spec"; then
+ eval soname=\"$soname_spec\"
+ else
+ soname=$realname
+ fi
+ if test -z "$dlname"; then
+ dlname=$soname
+ fi
+
+ lib=$output_objdir/$realname
+ linknames=
+ for link
+ do
+ func_append linknames " $link"
+ done
+
+ # Use standard objects if they are pic
+ test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ test "X$libobjs" = "X " && libobjs=
+
+ delfiles=
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+ export_symbols=$output_objdir/$libname.uexp
+ func_append delfiles " $export_symbols"
+ fi
+
+ orig_export_symbols=
+ case $host_os in
+ cygwin* | mingw* | windows* | cegcc*)
+ if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+ # exporting using user supplied symfile
+ func_dll_def_p "$export_symbols" || {
+ # and it's NOT already a .def file. Must figure out
+ # which of the given symbols are data symbols and tag
+ # them as such. So, trigger use of export_symbols_cmds.
+ # export_symbols gets reassigned inside the "prepare
+ # the list of exported symbols" if statement, so the
+ # include_expsyms logic still works.
+ orig_export_symbols=$export_symbols
+ export_symbols=
+ always_export_symbols=yes
+ }
+ fi
+ ;;
+ esac
+
+ # Prepare the list of exported symbols
+ if test -z "$export_symbols"; then
+ if test yes = "$always_export_symbols" || test -n "$export_symbols_regex"; then
+ func_verbose "generating symbol list for '$libname.la'"
+ export_symbols=$output_objdir/$libname.exp
+ $opt_dry_run || $RM $export_symbols
+ cmds=$export_symbols_cmds
+ save_ifs=$IFS; IFS='~'
+ for cmd1 in $cmds; do
+ IFS=$save_ifs
+ # Take the normal branch if the nm_file_list_spec branch
+ # doesn't work or if tool conversion is not needed.
+ case $nm_file_list_spec~$to_tool_file_cmd in
+ *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+ try_normal_branch=yes
+ eval cmd=\"$cmd1\"
+ func_len " $cmd"
+ len=$func_len_result
+ ;;
+ *)
+ try_normal_branch=no
+ ;;
+ esac
+ if test yes = "$try_normal_branch" \
+ && { test "$len" -lt "$max_cmd_len" \
+ || test "$max_cmd_len" -le -1; }
+ then
+ func_show_eval "$cmd" 'exit $?'
+ skipped_export=false
+ elif test -n "$nm_file_list_spec"; then
+ func_basename "$output"
+ output_la=$func_basename_result
+ save_libobjs=$libobjs
+ save_output=$output
+ output=$output_objdir/$output_la.nm
+ func_to_tool_file "$output"
+ libobjs=$nm_file_list_spec$func_to_tool_file_result
+ func_append delfiles " $output"
+ func_verbose "creating $NM input file list: $output"
+ for obj in $save_libobjs; do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result"
+ done > "$output"
+ eval cmd=\"$cmd1\"
+ func_show_eval "$cmd" 'exit $?'
+ output=$save_output
+ libobjs=$save_libobjs
+ skipped_export=false
+ else
+ # The command line is too long to execute in one step.
+ func_verbose "using reloadable object file for export list..."
+ skipped_export=:
+ # Break out early, otherwise skipped_export may be
+ # set to false by a later but shorter cmd.
+ break
+ fi
+ done
+ IFS=$save_ifs
+ if test -n "$export_symbols_regex" && test : != "$skipped_export"; then
+ func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+ func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+ fi
+ fi
+ fi
+
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ tmp_export_symbols=$export_symbols
+ test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols
+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+ fi
+
+ if test : != "$skipped_export" && test -n "$orig_export_symbols"; then
+ # The given exports_symbols file has to be filtered, so filter it.
+ func_verbose "filter symbol list for '$libname.la' to tag DATA exports"
+ # FIXME: $output_objdir/$libname.filter potentially contains lots of
+ # 's' commands, which not all seds can handle. GNU sed should be fine
+ # though. Also, the filter scales superlinearly with the number of
+ # global variables. join(1) would be nice here, but unfortunately
+ # isn't a blessed tool.
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+ export_symbols=$output_objdir/$libname.def
+ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+ fi
+
+ tmp_deplibs=
+ for test_deplib in $deplibs; do
+ case " $convenience " in
+ *" $test_deplib "*) ;;
+ *)
+ func_append tmp_deplibs " $test_deplib"
+ ;;
+ esac
+ done
+ deplibs=$tmp_deplibs
+
+ if test -n "$convenience"; then
+ if test -n "$whole_archive_flag_spec" &&
+ test yes = "$compiler_needs_object" &&
+ test -z "$libobjs"; then
+ # extract the archives, so we have objects to list.
+ # TODO: could optimize this to just extract one archive.
+ whole_archive_flag_spec=
+ fi
+ if test -n "$whole_archive_flag_spec"; then
+ save_libobjs=$libobjs
+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+ test "X$libobjs" = "X " && libobjs=
+ else
+ gentop=$output_objdir/${outputname}x
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $convenience
+ func_append libobjs " $func_extract_archives_result"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+ fi
+
+ if test yes = "$thread_safe" && test -n "$thread_safe_flag_spec"; then
+ eval flag=\"$thread_safe_flag_spec\"
+ func_append linker_flags " $flag"
+ fi
+
+ # Make a backup of the uninstalled library when relinking
+ if test relink = "$opt_mode"; then
+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+ fi
+
+ # Do each of the archive commands.
+ if test yes = "$module" && test -n "$module_cmds"; then
+ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+ eval test_cmds=\"$module_expsym_cmds\"
+ cmds=$module_expsym_cmds
+ else
+ eval test_cmds=\"$module_cmds\"
+ cmds=$module_cmds
+ fi
+ else
+ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+ eval test_cmds=\"$archive_expsym_cmds\"
+ cmds=$archive_expsym_cmds
+ else
+ eval test_cmds=\"$archive_cmds\"
+ cmds=$archive_cmds
+ fi
+ fi
+
+ if test : != "$skipped_export" &&
+ func_len " $test_cmds" &&
+ len=$func_len_result &&
+ test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+ :
+ else
+ # The command line is too long to link in one step, link piecewise
+ # or, if using GNU ld and skipped_export is not :, use a linker
+ # script.
+
+ # Save the value of $output and $libobjs because we want to
+ # use them later. If we have whole_archive_flag_spec, we
+ # want to use save_libobjs as it was before
+ # whole_archive_flag_spec was expanded, because we can't
+ # assume the linker understands whole_archive_flag_spec.
+ # This may have to be revisited, in case too many
+ # convenience libraries get linked in and end up exceeding
+ # the spec.
+ if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+ save_libobjs=$libobjs
+ fi
+ save_output=$output
+ func_basename "$output"
+ output_la=$func_basename_result
+
+ # Clear the reloadable object creation command queue and
+ # initialize k to one.
+ test_cmds=
+ concat_cmds=
+ objlist=
+ last_robj=
+ k=1
+
+ if test -n "$save_libobjs" && test : != "$skipped_export" && test yes = "$with_gnu_ld"; then
+ output=$output_objdir/$output_la.lnkscript
+ func_verbose "creating GNU ld script: $output"
+ echo 'INPUT (' > $output
+ for obj in $save_libobjs
+ do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result" >> $output
+ done
+ echo ')' >> $output
+ func_append delfiles " $output"
+ func_to_tool_file "$output"
+ output=$func_to_tool_file_result
+ elif test -n "$save_libobjs" && test : != "$skipped_export" && test -n "$file_list_spec"; then
+ output=$output_objdir/$output_la.lnk
+ func_verbose "creating linker input file list: $output"
+ : > $output
+ set x $save_libobjs
+ shift
+ firstobj=
+ if test yes = "$compiler_needs_object"; then
+ firstobj="$1 "
+ shift
+ fi
+ for obj
+ do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result" >> $output
+ done
+ func_append delfiles " $output"
+ func_to_tool_file "$output"
+ output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+ else
+ if test -n "$save_libobjs"; then
+ func_verbose "creating reloadable object files..."
+ output=$output_objdir/$output_la-$k.$objext
+ eval test_cmds=\"$reload_cmds\"
+ func_len " $test_cmds"
+ len0=$func_len_result
+ len=$len0
+
+ # Loop over the list of objects to be linked.
+ for obj in $save_libobjs
+ do
+ func_len " $obj"
+ func_arith $len + $func_len_result
+ len=$func_arith_result
+ if test -z "$objlist" ||
+ test "$len" -lt "$max_cmd_len"; then
+ func_append objlist " $obj"
+ else
+ # The command $test_cmds is almost too long, add a
+ # command to the queue.
+ if test 1 -eq "$k"; then
+ # The first file doesn't have a previous command to add.
+ reload_objs=$objlist
+ eval concat_cmds=\"$reload_cmds\"
+ else
+ # All subsequent reloadable object files will link in
+ # the last one created.
+ reload_objs="$objlist $last_robj"
+ eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+ fi
+ last_robj=$output_objdir/$output_la-$k.$objext
+ func_arith $k + 1
+ k=$func_arith_result
+ output=$output_objdir/$output_la-$k.$objext
+ objlist=" $obj"
+ func_len " $last_robj"
+ func_arith $len0 + $func_len_result
+ len=$func_arith_result
+ fi
+ done
+ # Handle the remaining objects by creating one last
+ # reloadable object file. All subsequent reloadable object
+ # files will link in the last one created.
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ reload_objs="$objlist $last_robj"
+ eval concat_cmds=\"\$concat_cmds$reload_cmds\"
+ if test -n "$last_robj"; then
+ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+ fi
+ func_append delfiles " $output"
+
+ else
+ output=
+ fi
+
+ ${skipped_export-false} && {
+ func_verbose "generating symbol list for '$libname.la'"
+ export_symbols=$output_objdir/$libname.exp
+ $opt_dry_run || $RM $export_symbols
+ libobjs=$output
+ # Append the command to create the export file.
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+ if test -n "$last_robj"; then
+ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+ fi
+ }
+
+ test -n "$save_libobjs" &&
+ func_verbose "creating a temporary reloadable object file: $output"
+
+ # Loop through the commands generated above and execute them.
+ save_ifs=$IFS; IFS='~'
+ for cmd in $concat_cmds; do
+ IFS=$save_ifs
+ $opt_quiet || {
+ func_quote_arg expand,pretty "$cmd"
+ eval "func_echo $func_quote_arg_result"
+ }
+ $opt_dry_run || eval "$cmd" || {
+ lt_exit=$?
+
+ # Restore the uninstalled library and exit
+ if test relink = "$opt_mode"; then
+ ( cd "$output_objdir" && \
+ $RM "${realname}T" && \
+ $MV "${realname}U" "$realname" )
+ fi
+
+ exit $lt_exit
+ }
+ done
+ IFS=$save_ifs
+
+ if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+ func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+ func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+ fi
+ fi
+
+ ${skipped_export-false} && {
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ tmp_export_symbols=$export_symbols
+ test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols
+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+ fi
+
+ if test -n "$orig_export_symbols"; then
+ # The given exports_symbols file has to be filtered, so filter it.
+ func_verbose "filter symbol list for '$libname.la' to tag DATA exports"
+ # FIXME: $output_objdir/$libname.filter potentially contains lots of
+ # 's' commands, which not all seds can handle. GNU sed should be fine
+ # though. Also, the filter scales superlinearly with the number of
+ # global variables. join(1) would be nice here, but unfortunately
+ # isn't a blessed tool.
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+ export_symbols=$output_objdir/$libname.def
+ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+ fi
+ }
+
+ libobjs=$output
+ # Restore the value of output.
+ output=$save_output
+
+ if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+ # Expand the library linking commands again to reset the
+ # value of $libobjs for piecewise linking.
+
+ # Do each of the archive commands.
+ if test yes = "$module" && test -n "$module_cmds"; then
+ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+ cmds=$module_expsym_cmds
+ else
+ cmds=$module_cmds
+ fi
+ else
+ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+ cmds=$archive_expsym_cmds
+ else
+ cmds=$archive_cmds
+ fi
+ fi
+ fi
+
+ if test -n "$delfiles"; then
+ # Append the command to remove temporary files to $cmds.
+ eval cmds=\"\$cmds~\$RM $delfiles\"
+ fi
+
+ # Add any objects from preloaded convenience libraries
+ if test -n "$dlprefiles"; then
+ gentop=$output_objdir/${outputname}x
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $dlprefiles
+ func_append libobjs " $func_extract_archives_result"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+
+ save_ifs=$IFS; IFS='~'
+ for cmd in $cmds; do
+ IFS=$sp$nl
+ eval cmd=\"$cmd\"
+ IFS=$save_ifs
+ $opt_quiet || {
+ func_quote_arg expand,pretty "$cmd"
+ eval "func_echo $func_quote_arg_result"
+ }
+ $opt_dry_run || eval "$cmd" || {
+ lt_exit=$?
+
+ # Restore the uninstalled library and exit
+ if test relink = "$opt_mode"; then
+ ( cd "$output_objdir" && \
+ $RM "${realname}T" && \
+ $MV "${realname}U" "$realname" )
+ fi
+
+ exit $lt_exit
+ }
+ done
+ IFS=$save_ifs
+
+ # Restore the uninstalled library and exit
+ if test relink = "$opt_mode"; then
+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+ if test -n "$convenience"; then
+ if test -z "$whole_archive_flag_spec"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+ fi
+
+ exit $EXIT_SUCCESS
+ fi
+
+ # Create links to the real library.
+ for linkname in $linknames; do
+ if test "$realname" != "$linkname"; then
+ func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+ fi
+ done
+
+ # If -module or -export-dynamic was specified, set the dlname.
+ if test yes = "$module" || test yes = "$export_dynamic"; then
+ # On all known operating systems, these are identical.
+ dlname=$soname
+ fi
+ fi
+ ;;
+
+ obj)
+ if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
+ func_warning "'-dlopen' is ignored for objects"
+ fi
+
+ case " $deplibs" in
+ *\ -l* | *\ -L*)
+ func_warning "'-l' and '-L' are ignored for objects" ;;
+ esac
+
+ test -n "$rpath" && \
+ func_warning "'-rpath' is ignored for objects"
+
+ test -n "$xrpath" && \
+ func_warning "'-R' is ignored for objects"
+
+ test -n "$vinfo" && \
+ func_warning "'-version-info' is ignored for objects"
+
+ test -n "$release" && \
+ func_warning "'-release' is ignored for objects"
+
+ case $output in
+ *.lo)
+ test -n "$objs$old_deplibs" && \
+ func_fatal_error "cannot build library object '$output' from non-libtool objects"
+
+ libobj=$output
+ func_lo2o "$libobj"
+ obj=$func_lo2o_result
+ ;;
+ *)
+ libobj=
+ obj=$output
+ ;;
+ esac
+
+ # Delete the old objects.
+ $opt_dry_run || $RM $obj $libobj
+
+ # Objects from convenience libraries. This assumes
+ # single-version convenience libraries. Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+ # the extraction.
+ reload_conv_objs=
+ gentop=
+ # if reload_cmds runs $LD directly, get rid of -Wl from
+ # whole_archive_flag_spec and hope we can get by with turning comma
+ # into space.
+ case $reload_cmds in
+ *\$LD[\ \$]*) wl= ;;
+ esac
+ if test -n "$convenience"; then
+ if test -n "$whole_archive_flag_spec"; then
+ eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+ test -n "$wl" || tmp_whole_archive_flags=`$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+ reload_conv_objs=$reload_objs\ $tmp_whole_archive_flags
+ else
+ gentop=$output_objdir/${obj}x
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $convenience
+ reload_conv_objs="$reload_objs $func_extract_archives_result"
+ fi
+ fi
+
+ # If we're not building shared, we need to use non_pic_objs
+ test yes = "$build_libtool_libs" || libobjs=$non_pic_objects
+
+ # Create the old-style object.
+ reload_objs=$objs$old_deplibs' '`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; /\.lib$/d; $lo2o" | $NL2SP`' '$reload_conv_objs
+
+ output=$obj
+ func_execute_cmds "$reload_cmds" 'exit $?'
+
+ # Exit if we aren't doing a library object file.
+ if test -z "$libobj"; then
+ if test -n "$gentop"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+
+ exit $EXIT_SUCCESS
+ fi
+
+ test yes = "$build_libtool_libs" || {
+ if test -n "$gentop"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+
+ # Create an invalid libtool object if no PIC, so that we don't
+ # accidentally link it into a program.
+ # $show "echo timestamp > $libobj"
+ # $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+ exit $EXIT_SUCCESS
+ }
+
+ if test -n "$pic_flag" || test default != "$pic_mode"; then
+ # Only do commands if we really have different PIC objects.
+ reload_objs="$libobjs $reload_conv_objs"
+ output=$libobj
+ func_execute_cmds "$reload_cmds" 'exit $?'
+ fi
+
+ if test -n "$gentop"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+
+ exit $EXIT_SUCCESS
+ ;;
+
+ prog)
+ case $host in
+ *cygwin*) func_stripname '' '.exe' "$output"
+ output=$func_stripname_result.exe;;
+ esac
+ test -n "$vinfo" && \
+ func_warning "'-version-info' is ignored for programs"
+
+ test -n "$release" && \
+ func_warning "'-release' is ignored for programs"
+
+ $preload \
+ && test unknown,unknown,unknown = "$dlopen_support,$dlopen_self,$dlopen_self_static" \
+ && func_warning "'LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+ case $host in
+ *-*-rhapsody* | *-*-darwin1.[012])
+        # On Rhapsody replace the C library with the System framework
+ compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+ finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+ ;;
+ esac
+
+ case $host in
+ *-*-darwin*)
+ # Don't allow lazy linking, it breaks C++ global constructors
+ # But is supposedly fixed on 10.4 or later (yay!).
+ if test CXX = "$tagname"; then
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+ 10.[0123])
+ func_append compile_command " $wl-bind_at_load"
+ func_append finalize_command " $wl-bind_at_load"
+ ;;
+ esac
+ fi
+ # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+ compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ ;;
+ esac
+
+
+ # move library search paths that coincide with paths to not yet
+ # installed libraries to the beginning of the library search list
+ new_libs=
+ for path in $notinst_path; do
+ case " $new_libs " in
+ *" -L$path/$objdir "*) ;;
+ *)
+ case " $compile_deplibs " in
+ *" -L$path/$objdir "*)
+ func_append new_libs " -L$path/$objdir" ;;
+ esac
+ ;;
+ esac
+ done
+ for deplib in $compile_deplibs; do
+ case $deplib in
+ -L*)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ done
+ compile_deplibs=$new_libs
+
+
+ func_append compile_command " $compile_deplibs"
+ func_append finalize_command " $finalize_deplibs"
+
+ if test -n "$rpath$xrpath"; then
+ # If the user specified any rpath flags, then add them.
+ for libdir in $rpath $xrpath; do
+ # This is the magic to use -rpath.
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ done
+ fi
+
+ # Now hardcode the library paths
+ rpath=
+ hardcode_libdirs=
+ for libdir in $compile_rpath $finalize_rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs=$libdir
+ else
+ # Just accumulate the unique libdirs.
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+ eval flag=\"$hardcode_libdir_flag_spec\"
+ func_append rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+ *" $libdir "*) ;;
+ *) func_append perm_rpath " $libdir" ;;
+ esac
+ fi
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-windows* | *-*-pw32* | *-*-os2* | *-cegcc*)
+ testbindir=`$ECHO "$libdir" | $SED -e 's*/lib$*/bin*'`
+ case :$dllsearchpath: in
+ *":$libdir:"*) ;;
+ ::) dllsearchpath=$libdir;;
+ *) func_append dllsearchpath ":$libdir";;
+ esac
+ case :$dllsearchpath: in
+ *":$testbindir:"*) ;;
+ ::) dllsearchpath=$testbindir;;
+ *) func_append dllsearchpath ":$testbindir";;
+ esac
+ ;;
+ esac
+ done
+ # Substitute the hardcoded libdirs into the rpath.
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir=$hardcode_libdirs
+ eval rpath=\" $hardcode_libdir_flag_spec\"
+ fi
+ compile_rpath=$rpath
+
+ rpath=
+ hardcode_libdirs=
+ for libdir in $finalize_rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs=$libdir
+ else
+ # Just accumulate the unique libdirs.
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+ eval flag=\"$hardcode_libdir_flag_spec\"
+ func_append rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$finalize_perm_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_perm_rpath " $libdir" ;;
+ esac
+ fi
+ done
+ # Substitute the hardcoded libdirs into the rpath.
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir=$hardcode_libdirs
+ eval rpath=\" $hardcode_libdir_flag_spec\"
+ fi
+ finalize_rpath=$rpath
+
+ if test -n "$libobjs" && test yes = "$build_old_libs"; then
+ # Transform all the library objects into standard objects.
+ compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ fi
+
+ func_generate_dlsyms "$outputname" "@PROGRAM@" false
+
+ # template prelinking step
+ if test -n "$prelink_cmds"; then
+ func_execute_cmds "$prelink_cmds" 'exit $?'
+ fi
+
+ wrappers_required=:
+ case $host in
+ *cegcc* | *mingw32ce*)
+ # Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway.
+ wrappers_required=false
+ ;;
+ *cygwin* | *mingw* | *windows* )
+ test yes = "$build_libtool_libs" || wrappers_required=false
+ ;;
+ *)
+ if test no = "$need_relink" || test yes != "$build_libtool_libs"; then
+ wrappers_required=false
+ fi
+ ;;
+ esac
+ $wrappers_required || {
+ # Replace the output file specification.
+ compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+ link_command=$compile_command$compile_rpath
+
+ # We have no uninstalled library dependencies, so finalize right now.
+ exit_status=0
+ func_show_eval "$link_command" 'exit_status=$?'
+
+ if test -n "$postlink_cmds"; then
+ func_to_tool_file "$output"
+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+ func_execute_cmds "$postlink_cmds" 'exit $?'
+ fi
+
+ # Delete the generated files.
+ if test -f "$output_objdir/${outputname}S.$objext"; then
+ func_show_eval '$RM "$output_objdir/${outputname}S.$objext"'
+ fi
+
+ exit $exit_status
+ }
+
+ if test -n "$compile_shlibpath$finalize_shlibpath"; then
+ compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+ fi
+ if test -n "$finalize_shlibpath"; then
+ finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+ fi
+
+ compile_var=
+ finalize_var=
+ if test -n "$runpath_var"; then
+ if test -n "$perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $perm_rpath; do
+ func_append rpath "$dir:"
+ done
+ compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+ fi
+ if test -n "$finalize_perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $finalize_perm_rpath; do
+ func_append rpath "$dir:"
+ done
+ finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+ fi
+ fi
+
+ if test yes = "$no_install"; then
+ # We don't need to create a wrapper script.
+ link_command=$compile_var$compile_command$compile_rpath
+ # Replace the output file specification.
+ link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+ # Delete the old output file.
+ $opt_dry_run || $RM $output
+ # Link the executable and exit
+ func_show_eval "$link_command" 'exit $?'
+
+ if test -n "$postlink_cmds"; then
+ func_to_tool_file "$output"
+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+ func_execute_cmds "$postlink_cmds" 'exit $?'
+ fi
+
+ exit $EXIT_SUCCESS
+ fi
+
+ case $hardcode_action,$fast_install in
+ relink,*)
+ # Fast installation is not supported
+ link_command=$compile_var$compile_command$compile_rpath
+ relink_command=$finalize_var$finalize_command$finalize_rpath
+
+ func_warning "this platform does not like uninstalled shared libraries"
+ func_warning "'$output' will be relinked during installation"
+ ;;
+ *,yes)
+ link_command=$finalize_var$compile_command$finalize_rpath
+ relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+ ;;
+ *,no)
+ link_command=$compile_var$compile_command$compile_rpath
+ relink_command=$finalize_var$finalize_command$finalize_rpath
+ ;;
+ *,needless)
+ link_command=$finalize_var$compile_command$finalize_rpath
+ relink_command=
+ ;;
+ esac
+
+ # Replace the output file specification.
+ link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+ # Delete the old output files.
+ $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+ func_show_eval "$link_command" 'exit $?'
+
+ if test -n "$postlink_cmds"; then
+ func_to_tool_file "$output_objdir/$outputname"
+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+ func_execute_cmds "$postlink_cmds" 'exit $?'
+ fi
+
+ # Now create the wrapper script.
+ func_verbose "creating $output"
+
+ # Quote the relink command for shipping.
+ if test -n "$relink_command"; then
+ # Preserve any variables that may affect compiler behavior
+ for var in $variables_saved_for_relink; do
+ if eval test -z \"\${$var+set}\"; then
+ relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+ elif eval var_value=\$$var; test -z "$var_value"; then
+ relink_command="$var=; export $var; $relink_command"
+ else
+ func_quote_arg pretty "$var_value"
+ relink_command="$var=$func_quote_arg_result; export $var; $relink_command"
+ fi
+ done
+ func_quote eval cd "`pwd`"
+ func_quote_arg pretty,unquoted "($func_quote_result; $relink_command)"
+ relink_command=$func_quote_arg_unquoted_result
+ fi
+
+ # Only actually do things if not in dry run mode.
+ $opt_dry_run || {
+ # win32 will think the script is a binary if it has
+ # a .exe suffix, so we strip it off here.
+ case $output in
+ *.exe) func_stripname '' '.exe' "$output"
+ output=$func_stripname_result ;;
+ esac
+ # test for cygwin because mv fails w/o .exe extensions
+ case $host in
+ *cygwin*)
+ exeext=.exe
+ func_stripname '' '.exe' "$outputname"
+ outputname=$func_stripname_result ;;
+ *) exeext= ;;
+ esac
+ case $host in
+        *cygwin* | *mingw* | *windows* )
+ func_dirname_and_basename "$output" "" "."
+ output_name=$func_basename_result
+ output_path=$func_dirname_result
+ cwrappersource=$output_path/$objdir/lt-$output_name.c
+ cwrapper=$output_path/$output_name.exe
+ $RM $cwrappersource $cwrapper
+ trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+ func_emit_cwrapperexe_src > $cwrappersource
+
+ # The wrapper executable is built using the $host compiler,
+ # because it contains $host paths and files. If cross-
+ # compiling, it, like the target executable, must be
+ # executed on the $host or under an emulation environment.
+ $opt_dry_run || {
+ $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+ $STRIP $cwrapper
+ }
+
+ # Now, create the wrapper script for func_source use:
+ func_ltwrapper_scriptname $cwrapper
+ $RM $func_ltwrapper_scriptname_result
+ trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+ $opt_dry_run || {
+ # note: this script will not be executed, so do not chmod.
+ if test "x$build" = "x$host"; then
+ $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+ else
+ func_emit_wrapper no > $func_ltwrapper_scriptname_result
+ fi
+ }
+ ;;
+ * )
+ $RM $output
+ trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+ func_emit_wrapper no > $output
+ chmod +x $output
+ ;;
+ esac
+ }
+ exit $EXIT_SUCCESS
+ ;;
+ esac
+
+ # See if we need to build an old-fashioned archive.
+ for oldlib in $oldlibs; do
+
+ case $build_libtool_libs in
+ convenience)
+ oldobjs="$libobjs_save $symfileobj"
+ addlibs=$convenience
+ build_libtool_libs=no
+ ;;
+ module)
+ oldobjs=$libobjs_save
+ addlibs=$old_convenience
+ build_libtool_libs=no
+ ;;
+ *)
+ oldobjs="$old_deplibs $non_pic_objects"
+ $preload && test -f "$symfileobj" \
+ && func_append oldobjs " $symfileobj"
+ addlibs=$old_convenience
+ ;;
+ esac
+
+ if test -n "$addlibs"; then
+ gentop=$output_objdir/${outputname}x
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $addlibs
+ func_append oldobjs " $func_extract_archives_result"
+ fi
+
+ # Do each command in the archive commands.
+ if test -n "$old_archive_from_new_cmds" && test yes = "$build_libtool_libs"; then
+ cmds=$old_archive_from_new_cmds
+ else
+
+ # Add any objects from preloaded convenience libraries
+ if test -n "$dlprefiles"; then
+ gentop=$output_objdir/${outputname}x
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $dlprefiles
+ func_append oldobjs " $func_extract_archives_result"
+ fi
+
+ # POSIX demands no paths to be encoded in archives. We have
+ # to avoid creating archives with duplicate basenames if we
+ # might have to extract them afterwards, e.g., when creating a
+ # static archive out of a convenience library, or when linking
+ # the entirety of a libtool archive into another (currently
+ # not supported by libtool).
+ if (for obj in $oldobjs
+ do
+ func_basename "$obj"
+ $ECHO "$func_basename_result"
+ done | sort | sort -uc >/dev/null 2>&1); then
+ :
+ else
+ echo "copying selected object files to avoid basename conflicts..."
+ gentop=$output_objdir/${outputname}x
+ func_append generated " $gentop"
+ func_mkdir_p "$gentop"
+ save_oldobjs=$oldobjs
+ oldobjs=
+ counter=1
+ for obj in $save_oldobjs
+ do
+ func_basename "$obj"
+ objbase=$func_basename_result
+ case " $oldobjs " in
+ " ") oldobjs=$obj ;;
+ *[\ /]"$objbase "*)
+ while :; do
+ # Make sure we don't pick an alternate name that also
+ # overlaps.
+ newobj=lt$counter-$objbase
+ func_arith $counter + 1
+ counter=$func_arith_result
+ case " $oldobjs " in
+ *[\ /]"$newobj "*) ;;
+ *) if test ! -f "$gentop/$newobj"; then break; fi ;;
+ esac
+ done
+ func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+ func_append oldobjs " $gentop/$newobj"
+ ;;
+ *) func_append oldobjs " $obj" ;;
+ esac
+ done
+ fi
+ func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
+ tool_oldlib=$func_to_tool_file_result
+ eval cmds=\"$old_archive_cmds\"
+
+ func_len " $cmds"
+ len=$func_len_result
+ if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+ cmds=$old_archive_cmds
+ elif test -n "$archiver_list_spec"; then
+ func_verbose "using command file archive linking..."
+ for obj in $oldobjs
+ do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result"
+ done > $output_objdir/$libname.libcmd
+ func_to_tool_file "$output_objdir/$libname.libcmd"
+ oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+ cmds=$old_archive_cmds
+ else
+ # the command line is too long to link in one step, link in parts
+ func_verbose "using piecewise archive linking..."
+ save_RANLIB=$RANLIB
+ RANLIB=:
+ objlist=
+ concat_cmds=
+ save_oldobjs=$oldobjs
+ oldobjs=
+ # Is there a better way of finding the last object in the list?
+ for obj in $save_oldobjs
+ do
+ last_oldobj=$obj
+ done
+ eval test_cmds=\"$old_archive_cmds\"
+ func_len " $test_cmds"
+ len0=$func_len_result
+ len=$len0
+ for obj in $save_oldobjs
+ do
+ func_len " $obj"
+ func_arith $len + $func_len_result
+ len=$func_arith_result
+ func_append objlist " $obj"
+ if test "$len" -lt "$max_cmd_len"; then
+ :
+ else
+ # the above command should be used before it gets too long
+ oldobjs=$objlist
+ if test "$obj" = "$last_oldobj"; then
+ RANLIB=$save_RANLIB
+ fi
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ eval concat_cmds=\"\$concat_cmds$old_archive_cmds\"
+ objlist=
+ len=$len0
+ fi
+ done
+ RANLIB=$save_RANLIB
+ oldobjs=$objlist
+ if test -z "$oldobjs"; then
+ eval cmds=\"\$concat_cmds\"
+ else
+ eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+ fi
+ fi
+ fi
+ func_execute_cmds "$cmds" 'exit $?'
+ done
+
+ test -n "$generated" && \
+ func_show_eval "${RM}r$generated"
+
+ # Now create the libtool archive.
+ case $output in
+ *.la)
+ old_library=
+ test yes = "$build_old_libs" && old_library=$libname.$libext
+ func_verbose "creating $output"
+
+ # Preserve any variables that may affect compiler behavior
+ for var in $variables_saved_for_relink; do
+ if eval test -z \"\${$var+set}\"; then
+ relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+ elif eval var_value=\$$var; test -z "$var_value"; then
+ relink_command="$var=; export $var; $relink_command"
+ else
+ func_quote_arg pretty,unquoted "$var_value"
+ relink_command="$var=$func_quote_arg_unquoted_result; export $var; $relink_command"
+ fi
+ done
+ # Quote the link command for shipping.
+ func_quote eval cd "`pwd`"
+ relink_command="($func_quote_result; $SHELL \"$progpath\" $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+ func_quote_arg pretty,unquoted "$relink_command"
+ relink_command=$func_quote_arg_unquoted_result
+ if test yes = "$hardcode_automatic"; then
+ relink_command=
+ fi
+
+ # Only create the output if not a dry run.
+ $opt_dry_run || {
+ for installed in no yes; do
+ if test yes = "$installed"; then
+ if test -z "$install_libdir"; then
+ break
+ fi
+ output=$output_objdir/${outputname}i
+ # Replace all uninstalled libtool libraries with the installed ones
+ newdependency_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ *.la)
+ func_basename "$deplib"
+ name=$func_basename_result
+ func_resolve_sysroot "$deplib"
+ eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+ test -z "$libdir" && \
+ func_fatal_error "'$deplib' is not a valid libtool archive"
+ func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ -L*)
+ func_stripname -L '' "$deplib"
+ func_replace_sysroot "$func_stripname_result"
+ func_append newdependency_libs " -L$func_replace_sysroot_result"
+ ;;
+ -R*)
+ func_stripname -R '' "$deplib"
+ func_replace_sysroot "$func_stripname_result"
+ func_append newdependency_libs " -R$func_replace_sysroot_result"
+ ;;
+ *) func_append newdependency_libs " $deplib" ;;
+ esac
+ done
+ dependency_libs=$newdependency_libs
+ newdlfiles=
+
+ for lib in $dlfiles; do
+ case $lib in
+ *.la)
+ func_basename "$lib"
+ name=$func_basename_result
+ eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+ test -z "$libdir" && \
+ func_fatal_error "'$lib' is not a valid libtool archive"
+ func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ *) func_append newdlfiles " $lib" ;;
+ esac
+ done
+ dlfiles=$newdlfiles
+ newdlprefiles=
+ for lib in $dlprefiles; do
+ case $lib in
+ *.la)
+ # Only pass preopened files to the pseudo-archive (for
+ # eventual linking with the app. that links it) if we
+ # didn't already link the preopened objects directly into
+ # the library:
+ func_basename "$lib"
+ name=$func_basename_result
+ eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+ test -z "$libdir" && \
+ func_fatal_error "'$lib' is not a valid libtool archive"
+ func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ esac
+ done
+ dlprefiles=$newdlprefiles
+ else
+ newdlfiles=
+ for lib in $dlfiles; do
+ case $lib in
+ [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;;
+ *) abs=`pwd`"/$lib" ;;
+ esac
+ func_append newdlfiles " $abs"
+ done
+ dlfiles=$newdlfiles
+ newdlprefiles=
+ for lib in $dlprefiles; do
+ case $lib in
+ [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;;
+ *) abs=`pwd`"/$lib" ;;
+ esac
+ func_append newdlprefiles " $abs"
+ done
+ dlprefiles=$newdlprefiles
+ fi
+ $RM $output
+ # place dlname in correct position for cygwin
+ # In fact, it would be nice if we could use this code for all target
+ # systems that can't hard-code library paths into their executables
+ # and that have no shared library path variable independent of PATH,
+ # but it turns out we can't easily determine that from inspecting
+ # libtool variables, so we have to hard-code the OSs to which it
+ # applies here; at the moment, that means platforms that use the PE
+ # object format with DLL files. See the long comment at the top of
+ # tests/bindir.at for full details.
+ tdlname=$dlname
+ case $host,$output,$installed,$module,$dlname in
+ *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *windows*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+ # If a -bindir argument was supplied, place the dll there.
+ if test -n "$bindir"; then
+ func_relative_path "$install_libdir" "$bindir"
+ tdlname=$func_relative_path_result/$dlname
+ else
+ # Otherwise fall back on heuristic.
+ tdlname=../bin/$dlname
+ fi
+ ;;
+ esac
+ $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that cannot go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+ if test no,yes = "$installed,$need_relink"; then
+ $ECHO >> $output "\
+relink_command=\"$relink_command\""
+ fi
+ done
+ }
+
+ # Do a symbolic link so that the libtool archive can be found in
+ # LD_LIBRARY_PATH before the program is installed.
+ func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+ ;;
+ esac
+ exit $EXIT_SUCCESS
+}
+
+if test link = "$opt_mode" || test relink = "$opt_mode"; then
+ func_mode_link ${1+"$@"}
+fi
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+ $debug_cmd
+
+ RM=$nonopt
+ files=
+ rmforce=false
+ exit_status=0
+
+ # This variable tells wrapper scripts just to set variables rather
+ # than running their programs.
+ libtool_install_magic=$magic
+
+ for arg
+ do
+ case $arg in
+ -f) func_append RM " $arg"; rmforce=: ;;
+ -*) func_append RM " $arg" ;;
+ *) func_append files " $arg" ;;
+ esac
+ done
+
+ test -z "$RM" && \
+ func_fatal_help "you must specify an RM program"
+
+ rmdirs=
+
+ for file in $files; do
+ func_dirname "$file" "" "."
+ dir=$func_dirname_result
+ if test . = "$dir"; then
+ odir=$objdir
+ else
+ odir=$dir/$objdir
+ fi
+ func_basename "$file"
+ name=$func_basename_result
+ test uninstall = "$opt_mode" && odir=$dir
+
+ # Remember odir for removal later, being careful to avoid duplicates
+ if test clean = "$opt_mode"; then
+ case " $rmdirs " in
+ *" $odir "*) ;;
+ *) func_append rmdirs " $odir" ;;
+ esac
+ fi
+
+ # Don't error if the file doesn't exist and rm -f was used.
+ if { test -L "$file"; } >/dev/null 2>&1 ||
+ { test -h "$file"; } >/dev/null 2>&1 ||
+ test -f "$file"; then
+ :
+ elif test -d "$file"; then
+ exit_status=1
+ continue
+ elif $rmforce; then
+ continue
+ fi
+
+ rmfiles=$file
+
+ case $name in
+ *.la)
+ # Possibly a libtool archive, so verify it.
+ if func_lalib_p "$file"; then
+ func_source $dir/$name
+
+ # Delete the libtool libraries and symlinks.
+ for n in $library_names; do
+ func_append rmfiles " $odir/$n"
+ done
+ test -n "$old_library" && func_append rmfiles " $odir/$old_library"
+
+ case $opt_mode in
+ clean)
+ case " $library_names " in
+ *" $dlname "*) ;;
+ *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
+ esac
+ test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
+ ;;
+ uninstall)
+ if test -n "$library_names"; then
+ # Do each command in the postuninstall commands.
+ func_execute_cmds "$postuninstall_cmds" '$rmforce || exit_status=1'
+ fi
+
+ if test -n "$old_library"; then
+ # Do each command in the old_postuninstall commands.
+ func_execute_cmds "$old_postuninstall_cmds" '$rmforce || exit_status=1'
+ fi
+ # FIXME: should reinstall the best remaining shared library.
+ ;;
+ esac
+ fi
+ ;;
+
+ *.lo)
+ # Possibly a libtool object, so verify it.
+ if func_lalib_p "$file"; then
+
+ # Read the .lo file
+ func_source $dir/$name
+
+ # Add PIC object to the list of files to remove.
+ if test -n "$pic_object" && test none != "$pic_object"; then
+ func_append rmfiles " $dir/$pic_object"
+ fi
+
+ # Add non-PIC object to the list of files to remove.
+ if test -n "$non_pic_object" && test none != "$non_pic_object"; then
+ func_append rmfiles " $dir/$non_pic_object"
+ fi
+ fi
+ ;;
+
+ *)
+ if test clean = "$opt_mode"; then
+ noexename=$name
+ case $file in
+ *.exe)
+ func_stripname '' '.exe' "$file"
+ file=$func_stripname_result
+ func_stripname '' '.exe' "$name"
+ noexename=$func_stripname_result
+ # $file with .exe has already been added to rmfiles,
+ # add $file without .exe
+ func_append rmfiles " $file"
+ ;;
+ esac
+ # Do a test to see if this is a libtool program.
+ if func_ltwrapper_p "$file"; then
+ if func_ltwrapper_executable_p "$file"; then
+ func_ltwrapper_scriptname "$file"
+ relink_command=
+ func_source $func_ltwrapper_scriptname_result
+ func_append rmfiles " $func_ltwrapper_scriptname_result"
+ else
+ relink_command=
+ func_source $dir/$noexename
+ fi
+
+ # note $name still contains .exe if it was in $file originally
+ # as does the version of $file that was added into $rmfiles
+ func_append rmfiles " $odir/$name $odir/${name}S.$objext"
+ if test yes = "$fast_install" && test -n "$relink_command"; then
+ func_append rmfiles " $odir/lt-$name"
+ fi
+ if test "X$noexename" != "X$name"; then
+ func_append rmfiles " $odir/lt-$noexename.c"
+ fi
+ fi
+ fi
+ ;;
+ esac
+ func_show_eval "$RM $rmfiles" 'exit_status=1'
+ done
+
+ # Try to remove the $objdir's in the directories where we deleted files
+ for dir in $rmdirs; do
+ if test -d "$dir"; then
+ func_show_eval "rmdir $dir >/dev/null 2>&1"
+ fi
+ done
+
+ exit $exit_status
+}
+
+if test uninstall = "$opt_mode" || test clean = "$opt_mode"; then
+ func_mode_uninstall ${1+"$@"}
+fi
+
+test -z "$opt_mode" && {
+ help=$generic_help
+ func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+ func_fatal_help "invalid operation mode '$opt_mode'"
+
+if test -n "$exec_cmd"; then
+ eval exec "$exec_cmd"
+ exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# where we disable both kinds of libraries. Given conflicting
+# choices, we go for a static library, that is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them. This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fallback to a static-only
+# configuration. But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
diff --git a/build-aux/missing b/build-aux/missing
new file mode 100755
index 0000000..1fe1611
--- /dev/null
+++ b/build-aux/missing
@@ -0,0 +1,215 @@
+#! /bin/sh
+# Common wrapper for a few potentially missing GNU programs.
+
+scriptversion=2018-03-07.03; # UTC
+
+# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+# Originally written by François Pinard <pinard@iro.umontreal.ca>, 1996.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+if test $# -eq 0; then
+ echo 1>&2 "Try '$0 --help' for more information"
+ exit 1
+fi
+
+case $1 in
+
+ --is-lightweight)
+ # Used by our autoconf macros to check whether the available missing
+ # script is modern enough.
+ exit 0
+ ;;
+
+ --run)
+ # Back-compat with the calling convention used by older automake.
+ shift
+ ;;
+
+ -h|--h|--he|--hel|--help)
+ echo "\
+$0 [OPTION]... PROGRAM [ARGUMENT]...
+
+Run 'PROGRAM [ARGUMENT]...', returning proper advice when this fails due
+to PROGRAM being missing or too old.
+
+Options:
+ -h, --help display this help and exit
+ -v, --version output version information and exit
+
+Supported PROGRAM values:
+ aclocal autoconf autoheader autom4te automake makeinfo
+ bison yacc flex lex help2man
+
+Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and
+'g' are ignored when checking the name.
+
+Send bug reports to <bug-automake@gnu.org>."
+ exit $?
+ ;;
+
+ -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
+ echo "missing $scriptversion (GNU Automake)"
+ exit $?
+ ;;
+
+ -*)
+ echo 1>&2 "$0: unknown '$1' option"
+ echo 1>&2 "Try '$0 --help' for more information"
+ exit 1
+ ;;
+
+esac
+
+# Run the given program, remember its exit status.
+"$@"; st=$?
+
+# If it succeeded, we are done.
+test $st -eq 0 && exit 0
+
+# Also exit now if it failed (or wasn't found), and '--version' was
+# passed; such an option is passed most likely to detect whether the
+# program is present and works.
+case $2 in --version|--help) exit $st;; esac
+
+# Exit code 63 means version mismatch. This often happens when the user
+# tries to use an ancient version of a tool on a file that requires a
+# minimum version.
+if test $st -eq 63; then
+ msg="probably too old"
+elif test $st -eq 127; then
+ # Program was missing.
+ msg="missing on your system"
+else
+ # Program was found and executed, but failed. Give up.
+ exit $st
+fi
+
+perl_URL=https://www.perl.org/
+flex_URL=https://github.com/westes/flex
+gnu_software_URL=https://www.gnu.org/software
+
+program_details ()
+{
+ case $1 in
+ aclocal|automake)
+ echo "The '$1' program is part of the GNU Automake package:"
+ echo "<$gnu_software_URL/automake>"
+ echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:"
+ echo "<$gnu_software_URL/autoconf>"
+ echo "<$gnu_software_URL/m4/>"
+ echo "<$perl_URL>"
+ ;;
+ autoconf|autom4te|autoheader)
+ echo "The '$1' program is part of the GNU Autoconf package:"
+ echo "<$gnu_software_URL/autoconf/>"
+ echo "It also requires GNU m4 and Perl in order to run:"
+ echo "<$gnu_software_URL/m4/>"
+ echo "<$perl_URL>"
+ ;;
+ esac
+}
+
+give_advice ()
+{
+ # Normalize program name to check for.
+ normalized_program=`echo "$1" | sed '
+ s/^gnu-//; t
+ s/^gnu//; t
+ s/^g//; t'`
+
+ printf '%s\n' "'$1' is $msg."
+
+ configure_deps="'configure.ac' or m4 files included by 'configure.ac'"
+ case $normalized_program in
+ autoconf*)
+ echo "You should only need it if you modified 'configure.ac',"
+ echo "or m4 files included by it."
+ program_details 'autoconf'
+ ;;
+ autoheader*)
+ echo "You should only need it if you modified 'acconfig.h' or"
+ echo "$configure_deps."
+ program_details 'autoheader'
+ ;;
+ automake*)
+ echo "You should only need it if you modified 'Makefile.am' or"
+ echo "$configure_deps."
+ program_details 'automake'
+ ;;
+ aclocal*)
+ echo "You should only need it if you modified 'acinclude.m4' or"
+ echo "$configure_deps."
+ program_details 'aclocal'
+ ;;
+ autom4te*)
+ echo "You might have modified some maintainer files that require"
+ echo "the 'autom4te' program to be rebuilt."
+ program_details 'autom4te'
+ ;;
+ bison*|yacc*)
+ echo "You should only need it if you modified a '.y' file."
+ echo "You may want to install the GNU Bison package:"
+ echo "<$gnu_software_URL/bison/>"
+ ;;
+ lex*|flex*)
+ echo "You should only need it if you modified a '.l' file."
+ echo "You may want to install the Fast Lexical Analyzer package:"
+ echo "<$flex_URL>"
+ ;;
+ help2man*)
+ echo "You should only need it if you modified a dependency" \
+ "of a man page."
+ echo "You may want to install the GNU Help2man package:"
+ echo "<$gnu_software_URL/help2man/>"
+ ;;
+ makeinfo*)
+ echo "You should only need it if you modified a '.texi' file, or"
+ echo "any other file indirectly affecting the aspect of the manual."
+ echo "You might want to install the Texinfo package:"
+ echo "<$gnu_software_URL/texinfo/>"
+ echo "The spurious makeinfo call might also be the consequence of"
+ echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might"
+ echo "want to install GNU make:"
+ echo "<$gnu_software_URL/make/>"
+ ;;
+ *)
+ echo "You might have modified some files without having the proper"
+ echo "tools for further handling them. Check the 'README' file, it"
+ echo "often tells you about the needed prerequisites for installing"
+ echo "this package. You may also peek at any GNU archive site, in"
+ echo "case some other package contains this missing '$1' program."
+ ;;
+ esac
+}
+
+give_advice "$1" | sed -e '1s/^/WARNING: /' \
+ -e '2,$s/^/ /' >&2
+
+# Propagate the correct exit status (expected to be 127 for a program
+# not found, 63 for a program that failed due to version mismatch).
+exit $st
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC0"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/configure b/configure
index 09f32b3..7fc103c 100755
--- a/configure
+++ b/configure
@@ -8954,10 +8954,6 @@ _lt_linker_boilerplate=`cat conftest.err`
$RM -r conftest*
-## CAVEAT EMPTOR:
-## There is no encapsulation within the following macros, do not change
-## the running order or otherwise move them around unless you know exactly
-## what you are doing...
if test -n "$compiler"; then
lt_prog_compiler_no_builtin_flag=
diff --git a/docs/C.md b/docs/C.md
new file mode 100644
index 0000000..c5e2094
--- /dev/null
+++ b/docs/C.md
@@ -0,0 +1,43 @@
+# liboCusts
+
+## Description
+liboCusts is a library that provides a way to introduce locusts into your code.
+
+Why would you want to do that? I don't know.
+
+## Example
+```c
+#include "locusts.h"
+int main(void) {
+ introduce_locusts();
+ /* Locusts are now in your codebase! */
+ return 0;
+}
+```
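+
+If the snippet above is saved as `example.c` (a name chosen for illustration),
+it can be compiled against an installed copy of the library roughly as follows;
+the `-llocusts` link flag and the include/library paths are assumptions that
+depend on your installation prefix:
+
+```sh
+# Sketch only: the library name and search paths are assumptions.
+cc -o example example.c -I/usr/local/include -L/usr/local/lib -llocusts
+./example
+```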
+
+## Building
+Multiple build systems are supported.
+
+### Meson
+```sh
+meson setup builddir --buildtype=release
+meson compile -C builddir
+```
+
+### CMake
+```sh
+cmake -S . -B build
+cmake --build build
+```
+
+### Autoconf
+```sh
+autoreconf -fi
+./configure
+make
+```
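+
+### Installing
+Each build system can also install the library. The commands below are a
+sketch and assume the default install rules and prefix for each tool:
+
+```sh
+# Meson: install from the configured build directory
+meson install -C builddir
+# CMake: install the targets built in ./build
+cmake --install build
+# Autoconf: install after make
+make install
+```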
+
+
+## License
+This project is licensed under the BSD Zero Clause License. See the [LICENSE](LICENSE) file for details.
diff --git a/include/locusts.h b/include/locusts.h
index 870a0ad..316bfdb 100644
--- a/include/locusts.h
+++ b/include/locusts.h
@@ -1,3 +1,4 @@
+/** @file */
#ifndef LOCUSTS_H
#define LOCUSTS_H
@@ -5,12 +6,12 @@
extern "C" {
#endif
-/*
-Introduces locusts into your codebase.
-
-Note: locusts are not included with installing this library. Please source your
-own.
-*/
+/**
+ * @brief Add locusts to your codebase.
+ *
+ * NOTE: Locusts are not included in this library.
+ * You will have to provide your own.
+ */
void introduce_locusts(void);
#ifdef __cplusplus
diff --git a/meson.build b/meson.build
index c38fa14..6111db9 100644
--- a/meson.build
+++ b/meson.build
@@ -1,4 +1,4 @@
-project('locusts', 'c', version: '1.0.0', default_options: ['warning_level=3'])
+project('locusts', 'c', version: '1.0.0', default_options: ['warning_level=3'], license: '0BSD')
source = 'src/locusts.c'
header = 'include/locusts.h'
@@ -18,4 +18,4 @@ if not meson.is_subproject()
'Locust Testing Utility',
executable('test_locusts', 'tests/test_locusts.c', link_with: lib, dependencies: project_dep),
)
-endif
\ No newline at end of file
+endif
diff --git a/src/locusts.c b/src/locusts.c
index 4f21f0c..84ff437 100644
--- a/src/locusts.c
+++ b/src/locusts.c
@@ -1,3 +1,4 @@
#include "locusts.h"
-void introduce_locusts(void) { return; }
\ No newline at end of file
+void introduce_locusts(void) { return; }
+